Cite Article
Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning
Choose citation formatBibTeX
@article{IJASEIT15950,
  author    = {Swasono, Dwiretno Istiyadi and Tjandrasa, Handayani and Fatichah, Chastine},
  title     = {Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning},
  journal   = {International Journal on Advanced Science, Engineering and Information Technology},
  volume    = {12},
  number    = {3},
  pages     = {1229--1235},
  year      = {2022},
  issn      = {2088-5334},
  doi       = {10.18517/ijaseit.12.3.15950},
  url       = {http://ijaseit.insightsociety.org/index.php?option=com_content&view=article&id=9&Itemid=1&article_id=15950},
  publisher = {INSIGHT - Indonesian Society for Knowledge and Human Development},
  keywords  = {Convolutional Neural Network; Transfer learning; Pruning; Tobacco leaf pest.},
  abstract  = {Convolutional Neural Network (CNN) usually uses a large image dataset with many parameters. Small datasets require a small number of parameters. Existing standard (pre-trained) models such as Alexnet, VGG, Inception, and Resnet have been tested with high accuracy but have many parameters. For small datasets, too many parameters become less efficient and increase computation costs. The high computational costs make the model unsuitable for computers with limited resources such as embedded devices and mobile phones. This research proposes pruning on the depth of resnet50 architecture and adds a dimensionality reduction layer after the pruning point. This approach does not require a complex pruning criteria algorithm, so it is easy to implement. Resnet50 was chosen because it is a good performance with batch normalization and skip connections. We use transfer learning for Resnet50 weight. Pruning is carried out at a depth of the network by cutting at the layer of the activation function. Several pruning points were selected to produce several models with certain parameters. The more networks layer pruned, the smaller the number of parameters produced. We add a layer for channel reduction after pruned network to reduce the number of feature maps before entering the fully connected (FC) layer as a classifier. We retrained a new network using a 2000 tobacco leaf pest dataset split into 1600 training and 400 validation images with 4-classes. The result shows that the accuracy could be maintained equal to the unpruned network up to 100% accuracy and 74.38% reduction rate for the number of parameters. A higher reduction rate of the number of parameters up to 90.62% still provides high accuracy of validation data around 99.3%. These prove that our proposed method effectively maintained accuracy and reduced the number of parameters.},
}
EndNote
%A Swasono, Dwiretno Istiyadi %A Tjandrasa, Handayani %A Fatichah, Chastine %D 2022 %T Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning %B 2022 %9 Convolutional Neural Network; Transfer learning; Pruning; Tobacco leaf pest. %! Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning %K Convolutional Neural Network; Transfer learning; Pruning; Tobacco leaf pest. %X Convolutional Neural Network (CNN) usually uses a large image dataset with many parameters. Small datasets require a small number of parameters. Existing standard (pre-trained) models such as Alexnet, VGG, Inception, and Resnet have been tested with high accuracy but have many parameters. For small datasets, too many parameters become less efficient and increase computation costs. The high computational costs make the model unsuitable for computers with limited resources such as embedded devices and mobile phones. This research proposes pruning on the depth of resnet50 architecture and adds a dimensionality reduction layer after the pruning point. This approach does not require a complex pruning criteria algorithm, so it is easy to implement. Resnet50 was chosen because it is a good performance with batch normalization and skip connections. We use transfer learning for Resnet50 weight. Pruning is carried out at a depth of the network by cutting at the layer of the activation function. Several pruning points were selected to produce several models with certain parameters. The more networks layer pruned, the smaller the number of parameters produced. We add a layer for channel reduction after pruned network to reduce the number of feature maps before entering the fully connected (FC) layer as a classifier. We retrained a new network using a 2000 tobacco leaf pest dataset split into 1600 training and 400 validation images with 4-classes. 
The result shows that the accuracy could be maintained equal to the unpruned network up to 100% accuracy and 74.38% reduction rate for the number of parameters. A higher reduction rate of the number of parameters up to 90.62% still provides high accuracy of validation data around 99.3%. These prove that our proposed method effectively maintained accuracy and reduced the number of parameters. %U http://ijaseit.insightsociety.org/index.php?option=com_content&view=article&id=9&Itemid=1&article_id=15950 %R doi:10.18517/ijaseit.12.3.15950 %J International Journal on Advanced Science, Engineering and Information Technology %V 12 %N 3 %@ 2088-5334
IEEE
Dwiretno Istiyadi Swasono, Handayani Tjandrasa, and Chastine Fatichah, "Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning," International Journal on Advanced Science, Engineering and Information Technology, vol. 12, no. 3, pp. 1229-1235, 2022. [Online]. Available: http://dx.doi.org/10.18517/ijaseit.12.3.15950.
RefMan/ProCite (RIS)
TY - JOUR AU - Swasono, Dwiretno Istiyadi AU - Tjandrasa, Handayani AU - Fatichah, Chastine PY - 2022 TI - Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning JF - International Journal on Advanced Science, Engineering and Information Technology; Vol. 12 (2022) No. 3 Y2 - 2022 SP - 1229 EP - 1235 SN - 2088-5334 PB - INSIGHT - Indonesian Society for Knowledge and Human Development KW - Convolutional Neural Network; Transfer learning; Pruning; Tobacco leaf pest. N2 - Convolutional Neural Network (CNN) usually uses a large image dataset with many parameters. Small datasets require a small number of parameters. Existing standard (pre-trained) models such as Alexnet, VGG, Inception, and Resnet have been tested with high accuracy but have many parameters. For small datasets, too many parameters become less efficient and increase computation costs. The high computational costs make the model unsuitable for computers with limited resources such as embedded devices and mobile phones. This research proposes pruning on the depth of resnet50 architecture and adds a dimensionality reduction layer after the pruning point. This approach does not require a complex pruning criteria algorithm, so it is easy to implement. Resnet50 was chosen because it is a good performance with batch normalization and skip connections. We use transfer learning for Resnet50 weight. Pruning is carried out at a depth of the network by cutting at the layer of the activation function. Several pruning points were selected to produce several models with certain parameters. The more networks layer pruned, the smaller the number of parameters produced. We add a layer for channel reduction after pruned network to reduce the number of feature maps before entering the fully connected (FC) layer as a classifier. We retrained a new network using a 2000 tobacco leaf pest dataset split into 1600 training and 400 validation images with 4-classes. 
The result shows that the accuracy could be maintained equal to the unpruned network up to 100% accuracy and 74.38% reduction rate for the number of parameters. A higher reduction rate of the number of parameters up to 90.62% still provides high accuracy of validation data around 99.3%. These prove that our proposed method effectively maintained accuracy and reduced the number of parameters. UR - http://ijaseit.insightsociety.org/index.php?option=com_content&view=article&id=9&Itemid=1&article_id=15950 DO - 10.18517/ijaseit.12.3.15950
RefWorks
RT Journal Article ID 15950 A1 Swasono, Dwiretno Istiyadi A1 Tjandrasa, Handayani A1 Fatichah, Chastine T1 Classification of Air-Cured Tobacco Leaf Pests Using Pruning Convolutional Neural Networks and Transfer Learning JF International Journal on Advanced Science, Engineering and Information Technology VO 12 IS 3 YR 2022 SP 1229 OP 1235 SN 2088-5334 PB INSIGHT - Indonesian Society for Knowledge and Human Development K1 Convolutional Neural Network; Transfer learning; Pruning; Tobacco leaf pest. AB Convolutional Neural Network (CNN) usually uses a large image dataset with many parameters. Small datasets require a small number of parameters. Existing standard (pre-trained) models such as Alexnet, VGG, Inception, and Resnet have been tested with high accuracy but have many parameters. For small datasets, too many parameters become less efficient and increase computation costs. The high computational costs make the model unsuitable for computers with limited resources such as embedded devices and mobile phones. This research proposes pruning on the depth of resnet50 architecture and adds a dimensionality reduction layer after the pruning point. This approach does not require a complex pruning criteria algorithm, so it is easy to implement. Resnet50 was chosen because it is a good performance with batch normalization and skip connections. We use transfer learning for Resnet50 weight. Pruning is carried out at a depth of the network by cutting at the layer of the activation function. Several pruning points were selected to produce several models with certain parameters. The more networks layer pruned, the smaller the number of parameters produced. We add a layer for channel reduction after pruned network to reduce the number of feature maps before entering the fully connected (FC) layer as a classifier. We retrained a new network using a 2000 tobacco leaf pest dataset split into 1600 training and 400 validation images with 4-classes. 
The result shows that the accuracy could be maintained equal to the unpruned network up to 100% accuracy and 74.38% reduction rate for the number of parameters. A higher reduction rate of the number of parameters up to 90.62% still provides high accuracy of validation data around 99.3%. These prove that our proposed method effectively maintained accuracy and reduced the number of parameters. LK http://ijaseit.insightsociety.org/index.php?option=com_content&view=article&id=9&Itemid=1&article_id=15950 DO 10.18517/ijaseit.12.3.15950