@article{gadiraju_vatsavai_2023,
  author   = {Gadiraju, Krishna Karthik and Vatsavai, Ranga Raju},
  title    = {Remote Sensing Based Crop Type Classification Via Deep Transfer Learning},
  journal  = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  volume   = {16},
  pages    = {4699--4712},
  year     = {2023},
  issn     = {2151-1535},
  doi      = {10.1109/JSTARS.2023.3270141},
  abstract = {Machine learning methods using aerial imagery (satellite and unmanned-aerial-vehicles-based imagery) have been extensively used for crop classification. Traditionally, per-pixel-based, object-based, and patch-based methods have been used for classifying crops worldwide. Recently, aided by the increased availability of powerful computing architectures such as graphical processing units, deep learning-based systems have become popular in other domains such as natural images. However, building complex deep neural networks for aerial imagery from scratch is a challenging affair, owing to the limited labeled data in the remote sensing domain and the multitemporal (phenology) and geographic variability associated with agricultural data. In this article, we discuss these challenges in detail. We then discuss various transfer learning methodologies that help overcome these challenges. Finally, we evaluate whether a transfer learning strategy of using pretrained networks from a different domain helps improve remote sensing image classification performance on a benchmark dataset. Our findings indicate that deep neural networks pretrained on a different domain dataset cannot be used as off-the-shelf feature extractors. However, using the pretrained network weights as initial weights for training on the remote sensing dataset or freezing the early layers of the pretrained network improves the performance compared to training the network from scratch.},
}

@inproceedings{gadiraju_chen_ramachandra_vatsavai_2022,
  author    = {Gadiraju, Krishna Karthik and Chen, Zexi and Ramachandra, Bharathkumar and Vatsavai, Ranga Raju},
  title     = {Real-Time Change Detection At the Edge},
  booktitle = {2022 21st IEEE International Conference on Machine Learning and Applications (ICMLA)},
  pages     = {776--781},
  year      = {2022},
  doi       = {10.1109/ICMLA55696.2022.00130},
  abstract  = {Detecting changes in real-time using remote sensing data is of paramount importance in areas such as crop health monitoring, weed detection, and disaster management. However, real-time change detection using remote sensing imagery faces several challenges: a) it requires real-time data extraction which is a challenge for traditional satellite imagery sources such as MODIS and LANDSAT due to the latency associated with collecting and processing the data. Due to the advances made in the past decade in drone technology, Unmanned Aerial Vehicles (UAVs) can be used for real-time data collection. However, a large percentage of this data will be unlabeled which limits the use of well-known supervised machine learning methods; b) from an infrastructure perspective, the cloud-edge solution of processing the data collected from UAVs (edge) only on the cloud is also constrained by latency and bandwidth-related issues. Due to these limitations, transferring large amounts of data between cloud and edge, or storing large amounts of information regarding past time periods on an edge device is infeasible. We can limit the amount of data transferred between the cloud and edge by performing analyses on-the-fly at the edge using low-power devices (edge devices) that can be connected to UAVs. However, edge devices have computational and memory bottlenecks, which would limit the usage of complex machine learning algorithms. In this paper, we demonstrate how an unsupervised GMM-based real-time change detection method at the edge can be used to identify weeds in real-time. We evaluate the scalability of our method on edge computing and traditional devices such as NVIDIA Jetson TX2, RTX 2080, and traditional Intel CPUs. We perform a case study for weed detection on images collected from UAVs. Our results demonstrate both the efficacy and computational efficiency of our method.},
}