Maintained by Difan Deng and Marius Lindauer.
The following list considers papers related to neural architecture search. It is by no means complete. If you find a paper missing from the list, please let us know.
Please note that although NAS methods steadily improve, the quality of empirical evaluations in this field is still lagging behind that of other areas in machine learning, AI, and optimization. We would therefore like to share some best practices for empirical evaluations of NAS methods, which we believe will facilitate sustained and measurable progress in the field. If you are interested in a teaser, please read our blog post or directly jump to our checklist.
Transformers have gained increasing popularity in different domains. For a comprehensive list of papers focusing on Neural Architecture Search for Transformer-Based spaces, the awesome-transformer-search repo is all you need.
2025
Gao, Tianxiao; Guo, Li; Wang, Shihao; Zhu, Shiai; Zhou, Dajiang
PQNAS: Mixed-precision Quantization-aware Neural Architecture Search with Pseudo Quantizer Proceedings Article
In: 2025 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), 2025.
@inproceedings{gao-icassp25a,
title = {PQNAS: Mixed-precision Quantization-aware Neural Architecture Search with Pseudo Quantizer},
author = {Tianxiao Gao and Li Guo and Shihao Wang and Shiai Zhu and Dajiang Zhou},
url = {https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=10888233},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
booktitle = {2025 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
He, Zhimin; Chen, Hongxiang; Zhou, Yan; Situ, Haozhen; Li, Yongyao; Li, Lvzhou
Self-supervised representation learning for Bayesian quantum architecture search Journal Article
In: Phys. Rev. A, vol. 111, iss. 3, pp. 032403, 2025.
@article{PhysRevA.111.032403,
title = {Self-supervised representation learning for Bayesian quantum architecture search},
author = {Zhimin He and Hongxiang Chen and Yan Zhou and Haozhen Situ and Yongyao Li and Lvzhou Li},
url = {https://link.aps.org/doi/10.1103/PhysRevA.111.032403},
doi = {10.1103/PhysRevA.111.032403},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
journal = {Phys. Rev. A},
volume = {111},
issue = {3},
pages = {032403},
publisher = {American Physical Society},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Feng, Shiyang; Li, Zhaowei; Zhang, Bo; Chen, Tao
DSF2-NAS: Dual-Stage Feature Fusion via Network Architecture Search for Classification of Multimodal Remote Sensing Images Journal Article
In: IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing, 2025.
@article{feng-ieeejstoaeors25a,
title = {DSF2-NAS: Dual-Stage Feature Fusion via Network Architecture Search for Classification of Multimodal Remote Sensing Images},
author = {Shiyang Feng and Zhaowei Li and Bo Zhang and Tao Chen},
url = {https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=10904332},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Chaudhary, Priyanka Rushikesh; Agrawal, Anand; Maiti, Rajib Ranjan
TinyDevID: TinyML-Driven IoT Devices IDentification Using Network Flow Data Proceedings Article
In: COMSNETS 2025 - Cybersecurity & Privacy Workshop (CSP), 2025.
@inproceedings{Rushikesh-csp25a,
title = {TinyDevID: TinyML-Driven IoT Devices IDentification Using Network Flow Data},
author = {Priyanka Rushikesh Chaudhary and Anand Agrawal and Rajib Ranjan Maiti},
url = {https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=10885715},
year = {2025},
date = {2025-02-01},
urldate = {2025-02-01},
booktitle = {COMSNETS 2025 - Cybersecurity & Privacy Workshop (CSP)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yu, Sixing
Scalable and resource-efficient federated learning: Techniques for resource-constrained heterogeneous systems PhD Thesis
2025.
@phdthesis{yu-phd25a,
title = {Scalable and resource-efficient federated learning: Techniques for resource-constrained heterogeneous systems},
author = {Sixing Yu},
url = {https://www.proquest.com/docview/3165602177?pq-origsite=gscholar&fromopenview=true&sourcetype=Dissertations%20&%20Theses},
year = {2025},
date = {2025-02-01},
urldate = {2025-02-01},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Fu, Jintao; Cong, Peng; Xu, Shuo; Chang, Jiahao; Liu, Ximing; Sun, Yuewen
Neural architecture search with Deep Radon Prior for sparse-view CT image reconstruction Journal Article
In: Med Phys, 2025.
@article{Fu-medphs25a,
title = {Neural architecture search with Deep Radon Prior for sparse-view CT image reconstruction},
author = {Jintao Fu and Peng Cong and Shuo Xu and Jiahao Chang and Ximing Liu and Yuewen Sun},
url = {https://pubmed.ncbi.nlm.nih.gov/39930320/},
year = {2025},
date = {2025-02-01},
urldate = {2025-02-01},
journal = {Med Phys},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhao, Yi-Heng; Pang, Shen-Wen; Huang, Heng-Zhi; Wu, Shao-Wen; Sun, Shao-Hua; Liu, Zhen-Bing; Pan, Zhi-Chao
Automatic clustering of single-molecule break junction data through task-oriented representation learning Journal Article
In: Rare Metals, 2025.
@article{zhao-rarem25a,
title = {Automatic clustering of single-molecule break junction data through task-oriented representation learning},
author = {Yi-Heng Zhao and Shen-Wen Pang and Heng-Zhi Huang and Shao-Wen Wu and Shao-Hua Sun and Zhen-Bing Liu and Zhi-Chao Pan},
url = {https://link.springer.com/article/10.1007/s12598-024-03089-7},
year = {2025},
date = {2025-02-01},
urldate = {2025-02-01},
journal = {Rare Metals},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Huang, Tao
Efficient Deep Neural Architecture Design and Training PhD Thesis
2025.
@phdthesis{huang-phd25a,
title = {Efficient Deep Neural Architecture Design and Training},
author = {Tao Huang},
url = {https://ses.library.usyd.edu.au/handle/2123/33598},
year = {2025},
date = {2025-02-01},
urldate = {2025-02-01},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Herterich, Nils; Liu, Kai; Stein, Anthony
Multi-objective neural architecture search for real-time weed detection on embedded system Miscellaneous
2025.
@misc{Herterich,
title = {Multi-objective neural architecture search for real-time weed detection on embedded system},
author = {Nils Herterich and Kai Liu and Anthony Stein},
url = {https://dl.gi.de/server/api/core/bitstreams/29a49f8d-304e-4073-8a92-4bef6483c087/content},
year = {2025},
date = {2025-02-01},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Tabak, Gabriel Couto; Molenaar, Dylan; Curi, Mariana
An evolutionary neural architecture search for item response theory autoencoders Journal Article
In: Behaviormetrika, 2025.
@article{tabak-behaviormetrika25a,
title = {An evolutionary neural architecture search for item response theory autoencoders},
author = {Gabriel Couto Tabak and Dylan Molenaar and Mariana Curi},
url = {https://link.springer.com/article/10.1007/s41237-024-00250-5},
year = {2025},
date = {2025-01-27},
urldate = {2025-01-27},
journal = {Behaviormetrika},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hao, Debei; Pei, Songwei
MIG-DARTS: towards effective differentiable architecture search by gradually mitigating the initial-channel gap between search and evaluation Journal Article
In: Neural Computing and Applications, 2025.
@article{hao-ncaa25a,
title = {MIG-DARTS: towards effective differentiable architecture search by gradually mitigating the initial-channel gap between search and evaluation},
author = {Debei Hao and Songwei Pei},
url = {https://link.springer.com/article/10.1007/s00521-024-10681-6},
year = {2025},
date = {2025-01-09},
urldate = {2025-01-09},
journal = {Neural Computing and Applications},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhao, Yiwei; Chen, Jinhui; Zhang, Sai Qian; Sarwar, Syed Shakib; Stangherlin, Kleber Hugo; Gomez, Jorge Tomas; Seo, Jae-Sun; De Salvo, Barbara; Liu, Chiao; Gibbons, Phillip B.; Li, Ziyun
H4H: Hybrid Convolution-Transformer Architecture Search for NPU-CIM Heterogeneous Systems for AR/VR Applications Proceedings Article
In: ASPDAC ’25, 2025.
@inproceedings{zhao-aspdac25a,
title = {H4H: Hybrid Convolution-Transformer Architecture Search for NPU-CIM Heterogeneous Systems for AR/VR Applications},
author = {Yiwei Zhao and Jinhui Chen and Sai Qian Zhang and Syed Shakib Sarwar and Kleber Hugo Stangherlin and Jorge Tomas Gomez and Jae-Sun Seo and Barbara De Salvo and Chiao Liu and Phillip B. Gibbons and Ziyun Li},
url = {https://www.pdl.cmu.edu/PDL-FTP/associated/ASP-DAC2025-1073-12.pdf},
year = {2025},
date = {2025-01-02},
urldate = {2025-01-02},
booktitle = {ASPDAC ’25},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wang, Ke; Song, Yafei; Xu, Yunfei; Quan, Wen; Ni, Peng; Wang, Peng; Li, Chenghai; Zhi, Xinyan
A novel automated neural network architecture search method of air target intent recognition Journal Article
In: Chinese Journal of Aeronautics, vol. 38, no. 6, pp. 103295, 2025, ISSN: 1000-9361.
@article{WANG2025103295,
title = {A novel automated neural network architecture search method of air target intent recognition},
author = {Ke Wang and Yafei Song and Yunfei Xu and Wen Quan and Peng Ni and Peng Wang and Chenghai Li and Xinyan Zhi},
url = {https://www.sciencedirect.com/science/article/pii/S1000936124004448},
doi = {10.1016/j.cja.2024.11.005},
issn = {1000-9361},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Chinese Journal of Aeronautics},
volume = {38},
number = {6},
pages = {103295},
abstract = {Modern air battlefield operations are characterized by flexibility and change, and the battlefield evolves rapidly and intricately. However, traditional air target intent recognition methods, which mainly rely on manually designed neural network models, find it difficult to maintain sustained and excellent performance in such a complex and changing environment. To address the problem of the adaptability of neural network models in complex environments, we propose a lightweight Transformer model (TransATIR) with a strong adaptive adjustment capability, based on the characteristics of air target intent recognition and the neural network architecture search technique. After conducting extensive experiments, it has been proved that TransATIR can efficiently extract the deep feature information from battlefield situation data by utilizing the neural architecture search algorithm, in order to quickly and accurately identify the real intention of the target. The experimental results indicate that TransATIR significantly improves recognition accuracy compared to the existing state-of-the-art methods, and also effectively reduces the computational complexity of the model.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Amin, Md Hasibul; Mohammadi, Mohammadreza; Bakos, Jason D.; Zand, Ramtin
CrossNAS: A Cross-Layer Neural Architecture Search Framework for PIM Systems Technical Report
2025.
@techreport{amin2025crossnascrosslayerneuralarchitecture,
title = {CrossNAS: A Cross-Layer Neural Architecture Search Framework for PIM Systems},
author = {Md Hasibul Amin and Mohammadreza Mohammadi and Jason D. Bakos and Ramtin Zand},
url = {https://arxiv.org/abs/2505.22868},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Xie, Yilin; Zhang, Shiqiang; Qing, Jixiang; Misener, Ruth; Tsay, Calvin
Global optimization of graph acquisition functions for neural architecture search Technical Report
2025.
@techreport{xie2025globaloptimizationgraphacquisition,
title = {Global optimization of graph acquisition functions for neural architecture search},
author = {Yilin Xie and Shiqiang Zhang and Jixiang Qing and Ruth Misener and Calvin Tsay},
url = {https://arxiv.org/abs/2505.23640},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Sun, Yu; Zhang, Xianglin; Dong, Liang; Liu, Ning
Multi-objective evolutionary neural architecture search for medical image analysis using transformer and large language models in advancing public health Journal Article
In: Applied Soft Computing, vol. 179, pp. 113279, 2025, ISSN: 1568-4946.
@article{SUN2025113279,
title = {Multi-objective evolutionary neural architecture search for medical image analysis using transformer and large language models in advancing public health},
author = {Yu Sun and Xianglin Zhang and Liang Dong and Ning Liu},
url = {https://www.sciencedirect.com/science/article/pii/S1568494625005903},
doi = {10.1016/j.asoc.2025.113279},
issn = {1568-4946},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Applied Soft Computing},
volume = {179},
pages = {113279},
abstract = {The rapid growth of medical imaging data in modern healthcare networks demands sophisticated automated analysis methods that can maintain high accuracy while operating efficiently at scale. Current approaches using transformers and large language models (LLMs) face challenges balancing computational requirements with diagnostic precision across diverse healthcare settings. This paper presents TransMed-NAS (transformer medical neural architecture search), a multi-objective evolutionary neural architecture search framework that automatically discovers efficient hybrid architectures by integrating transformers and LLMs for medical image segmentation. Our approach leverages evolutionary computation to optimize segmentation accuracy and computational efficiency while incorporating medical domain knowledge through LLM guidance. The framework introduces several innovations: a hierarchical channel selection strategy that preserves clinically relevant features, a weight entanglement mechanism that accelerates architecture search through intelligent knowledge transfer, and a surrogate model acceleration technique that reduces computational overhead while maintaining reliability. Experimental results on the ISIC 2020 dataset demonstrate TransMed-NAS’s superior performance compared to state-of-the-art methods. Our small model variant achieves competitive accuracy (0.934 Dice score) with only 0.82M parameters, while our large variant establishes new benchmarks (0.947 Dice score) with significantly reduced computational requirements. Ablation studies confirm the effectiveness of each component, particularly highlighting how LLM integration enhances architecture search efficiency and clinical relevance. These results demonstrate TransMed-NAS’s potential to advance automated medical image analysis in resource-diverse healthcare settings, making sophisticated diagnostic capabilities more accessible to underserved communities.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wang, Chen; Guo, Tiezheng; Yang, Qingwen; Liu, Yanyi; Tang, Jiawei; Wen, Yingyou
A NAS-Based Risk Prediction Model and Interpretable System for Amyloidosis Journal Article
In: Computers, Materials and Continua, vol. 83, no. 3, pp. 5561-5574, 2025, ISSN: 1546-2218.
@article{WANG20255561,
title = {A NAS-Based Risk Prediction Model and Interpretable System for Amyloidosis},
author = {Chen Wang and Tiezheng Guo and Qingwen Yang and Yanyi Liu and Jiawei Tang and Yingyou Wen},
url = {https://www.sciencedirect.com/science/article/pii/S1546221825004837},
doi = {10.32604/cmc.2025.063676},
issn = {1546-2218},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Computers, Materials and Continua},
volume = {83},
number = {3},
pages = {5561-5574},
abstract = {Primary light chain amyloidosis is a rare hematologic disease with multi-organ involvement. Nearly one-third of patients with amyloidosis experience five or more consultations before diagnosis, which may lead to a poor prognosis due to delayed diagnosis. Early risk prediction based on artificial intelligence is valuable for clinical diagnosis and treatment of amyloidosis. For this disease, we propose an Evolutionary Neural Architecture Searching (ENAS) based risk prediction model, which achieves high-precision early risk prediction using physical examination data as a reference factor. To further enhance the value of clinic application, we designed a natural language-based interpretable system around the NAS-assisted risk prediction model for amyloidosis, which utilizes a large language model and Retrieval-Augmented Generation (RAG) to achieve further interpretation of the predicted conclusions. We also propose a document-based global semantic slicing approach in RAG to achieve more accurate slicing and improve the professionalism of the generated interpretations. Tests and implementation show that the proposed risk prediction model can be effectively used for early screening of amyloidosis and that the interpretation method based on the large language model and RAG can effectively provide professional interpretation of predicted results, which provides an effective method and means for the clinical applications of AI.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wei, Lan; Zhang, Dandan
A Dataset and Benchmarks for Deep Learning-Based Optical Microrobot Pose and Depth Perception Technical Report
2025.
@techreport{wei2025datasetbenchmarksdeeplearningbased,
title = {A Dataset and Benchmarks for Deep Learning-Based Optical Microrobot Pose and Depth Perception},
author = {Lan Wei and Dandan Zhang},
url = {https://arxiv.org/abs/2505.18303},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Aach, Marcel; Blanc, Cyril; Lintermann, Andreas; De Grave, Kurt
Optimizing edge AI models on HPC systems with the edge in the loop Technical Report
2025.
@techreport{aach2025optimizingedgeaimodels,
title = {Optimizing edge AI models on HPC systems with the edge in the loop},
author = {Marcel Aach and Cyril Blanc and Andreas Lintermann and Kurt De Grave},
url = {https://arxiv.org/abs/2505.19995},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Xie, Qing; Yu, Ruiyun
Expo-GAN: A Style Transfer Generative Adversarial Network for Exhibition Hall Design Based on Optimized Cyclic and Neural Architecture Search Journal Article
In: Computers, Materials and Continua, vol. 83, no. 3, pp. 4757-4774, 2025, ISSN: 1546-2218.
@article{XIE20254757,
title = {Expo-GAN: A Style Transfer Generative Adversarial Network for Exhibition Hall Design Based on Optimized Cyclic and Neural Architecture Search},
author = {Qing Xie and Ruiyun Yu},
url = {https://www.sciencedirect.com/science/article/pii/S1546221825004138},
doi = {10.32604/cmc.2025.063345},
issn = {1546-2218},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Computers, Materials and Continua},
volume = {83},
number = {3},
pages = {4757-4774},
abstract = {This study presents a groundbreaking method named Expo-GAN (Exposition-Generative Adversarial Network) for style transfer in exhibition hall design, using a refined version of the Cycle Generative Adversarial Network (CycleGAN). The primary goal is to enhance the transformation of image styles while maintaining visual consistency, an area where current CycleGAN models often fall short. These traditional models typically face difficulties in accurately capturing expansive features as well as the intricate stylistic details necessary for high-quality image transformation. To address these limitations, the research introduces several key modifications to the CycleGAN architecture. Enhancements to the generator involve integrating U-net with SpecTransformer modules. This integration incorporates the use of Fourier transform techniques coupled with multi-head self-attention mechanisms, which collectively improve the generator’s ability to depict both large-scale structural patterns and minute elements meticulously in the generated images. This enhancement allows the generator to achieve a more detailed and coherent fusion of styles, essential for exhibition hall designs where both broad aesthetic strokes and detailed nuances matter significantly. The study also proposes innovative changes to the discriminator by employing dilated convolution and global attention mechanisms. These are derived using the Differentiable Architecture Search (DARTS) Neural Architecture Search framework to expand the receptive field, which is crucial for recognizing comprehensive artistically styled images. By broadening the ability to discern complex artistic features, the model avoids previous pitfalls associated with style inconsistency and missing detailed features. Moreover, the traditional cyde-consistency loss function is replaced with the Learned Perceptual Image Patch Similarity (LPIPS) metric. This shift aims to significantly enhance the perceptual quality of the resultant images by prioritizing human-perceived similarities, which aligns better with user expectations and professional standards in design aesthetics. The experimental phase of this research demonstrates that this novel approach consistently outperforms the conventional CycleGAN across a broad range of datasets. Complementary ablation studies and qualitative assessments underscore its superiority, particularly in maintaining detail fidelity and style continuity. This is critical for creating a visually harmonious exhibition hall design where every detail contributes to the overall aesthetic appeal. The results illustrate that this refined approach effectively bridges the gap between technical capability and artistic necessity, marking a significant advancement in computational design methodologies.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zechen, Zheng; Xuelei, He; Fengjun, Zhao; Xiaowei, He
PSNAS-Net: Hybrid gradient-physical optimization for efficient neural architecture search in customized medical imaging analysis Journal Article
In: Expert Systems with Applications, vol. 288, pp. 128155, 2025, ISSN: 0957-4174.
@article{ZECHEN2025128155,
title = {PSNAS-Net: Hybrid gradient-physical optimization for efficient neural architecture search in customized medical imaging analysis},
author = {Zheng Zechen and He Xuelei and Zhao Fengjun and He Xiaowei},
url = {https://www.sciencedirect.com/science/article/pii/S0957417425017750},
doi = {10.1016/j.eswa.2025.128155},
issn = {0957-4174},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Expert Systems with Applications},
volume = {288},
pages = {128155},
abstract = {Neural architecture search (NAS) facilitates the automated construction of neural networks tailored to specific tasks and requirements, resulting in models that are more closely aligned with the target task’s demands. However, in many studies, the extensive design space, high search costs, and time-consuming evaluation calculations render NAS impractical for numerous medical data tasks. Addressing these challenges, this study introduces an efficient algorithm for searching deep learning architectures. Initially, we propose 19 fundamental rules to streamline the design space, thereby reducing its scale. To improve the efficiency of the algorithm, we designed a NAS framework (PSNAS-Net) for convolutional neural networks and VisionTransformer, which consists of two search stages: Firstly, the improved powell algorithm is used to determine the model range, and the population-based simulated annealing algorithm is utilized to expedite the search for the final model. During the neural architecture search process, we consider accuracy, parameters, FLOPs, and model stability as comprehensive evaluation objectives, we designed a robust, flexible, and comprehensive metric for model evaluation. The experimental results demonstrate that PSNAS-Net achieves significantly shorter search times (0.05-1.47 GPU Days) compared to 19 existing NAS methods, while discovering compact models (as small as 0.11M) with superior performance across five medical image benchmarks. This study offers a viable approach for model search that accommodates individualized requirements.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Becktepe, Jannis; Hennig, Leona; Oeltze-Jafra, Steffen; Lindauer, Marius
Auto-nnU-Net: Towards Automated Medical Image Segmentation Technical Report
2025.
@techreport{becktepe2025autonnunetautomatedmedicalimage,
title = {Auto-nnU-Net: Towards Automated Medical Image Segmentation},
author = {Jannis Becktepe and Leona Hennig and Steffen Oeltze-Jafra and Marius Lindauer},
url = {https://arxiv.org/abs/2505.16561},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Pinos, Michal; Klhufek, Jan; Mrazek, Vojtech; Sekanina, Lukas
Inference Energy Analysis in Context of Hardware-Aware NAS Proceedings Article
In: 2025 IEEE 28th International Symposium on Design and Diagnostics of Electronic Circuits and Systems (DDECS), pp. 161-164, 2025.
@inproceedings{11006674,
title = {Inference Energy Analysis in Context of Hardware-Aware NAS},
author = {Michal Pinos and Jan Klhufek and Vojtech Mrazek and Lukas Sekanina},
url = {https://ieeexplore.ieee.org/abstract/document/11006674},
doi = {10.1109/DDECS63720.2025.11006674},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {2025 IEEE 28th International Symposium on Design and Diagnostics of Electronic Circuits and Systems (DDECS)},
pages = {161-164},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Louis, Gani; Caves, Kenett; Bello, Sadis
Hybrid Neural Network Architectures: Integrating Convolutional and Recurrent Layers with Genetic Optimization for Anomaly Detection Journal Article
In: 2025.
@article{articlem,
title = {Hybrid Neural Network Architectures: Integrating Convolutional and Recurrent Layers with Genetic Optimization for Anomaly Detection},
author = {Gani Louis and Kenett Caves and Sadis Bello},
url = {https://www.researchgate.net/profile/Sadis-Bello/publication/391950569_Hybrid_Neural_Network_Architectures_Integrating_Convolutional_and_Recurrent_Layers_with_Genetic_Optimization_for_Anomaly_Detection/links/682e59a78a76251f22e4adfb/Hybrid-Neural-Network-Architectures-Integrating-Convolutional-and-Recurrent-Layers-with-Genetic-Optimization-for-Anomaly-Detection.pdf},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Phan, Quan Minh; Luong, Ngoc Hoang
From Hand-Crafted Metrics to Evolved Training-Free Performance Predictors for Neural Architecture Search via Genetic Programming Technical Report
2025.
@techreport{phan2025handcraftedmetricsevolvedtrainingfree,
title = {From Hand-Crafted Metrics to Evolved Training-Free Performance Predictors for Neural Architecture Search via Genetic Programming},
author = {Quan Minh Phan and Ngoc Hoang Luong},
url = {https://arxiv.org/abs/2505.15832},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zheng, Jie; He, Chunlin; Man, Wenxing; Wang, Jing
Training-free multi-scale neural architecture search for high-incidence cancer prediction Journal Article
In: Engineering Applications of Artificial Intelligence, vol. 156, pp. 111089, 2025, ISSN: 0952-1976.
@article{ZHENG2025111089,
title = {Training-free multi-scale neural architecture search for high-incidence cancer prediction},
author = {Jie Zheng and Chunlin He and Wenxing Man and Jing Wang},
url = {https://www.sciencedirect.com/science/article/pii/S0952197625010905},
doi = {10.1016/j.engappai.2025.111089},
issn = {0952-1976},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Engineering Applications of Artificial Intelligence},
volume = {156},
pages = {111089},
abstract = {Deep neural networks excel in high-incidence cancer prediction; however, designing networks that predict specific cancers is time-consuming and requires expert. The neural architecture search method offers a way to automate network design and has shown success in natural image. However, the small and varying lesion sizes in cancer image pose challenges, and most neural architecture search methods are computationally expensive and exhibit low agent correlation. Therefore, we propose a training-free multi-scale neural architecture search method for high-incidence cancer prediction. We introduce a multi-scale search space to address varying lesion sizes; and identify optimal scale combinations for feature extraction. To reduce computational costs and improve agent correlation, we design a training-free agent that evaluates network performance based on convergence, expressiveness, trainability, and complexity, enabling efficient neural architecture search implementation. Our extensive experiments on the NAS-Bench-201, MedmnistV2, LC25000, BreakHis, and CRC-5000 datasets show that our method outperforms both manually designed networks and state-of-the-art neural architecture search methods. The results demonstrate average improvements of 4.2%, 1.88%, 79.45%, 34.31%, and 31.71% in accuracy, area under the curve, search time, and Kendall and Spearman correlation coefficients, respectively.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kocher, Nick; Wassermann, Christian; Hennig, Leona; Seng, Jonas; Hoos, Holger; Kersting, Kristian; Lindauer, Marius; Müller, Matthias
Guidelines for the Quality Assessment of Energy-Aware NAS Benchmarks Technical Report
2025.
@techreport{kocher2025guidelinesqualityassessmentenergyaware,
title = {Guidelines for the Quality Assessment of Energy-Aware NAS Benchmarks},
author = {Nick Kocher and Christian Wassermann and Leona Hennig and Jonas Seng and Holger Hoos and Kristian Kersting and Marius Lindauer and Matthias Müller},
url = {https://arxiv.org/abs/2505.15631},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Ardila, Diego Páez; Carvalho, Thiago; Saavedra, Santiago Vasquez; Niño, Cesar Valencia; Figueiredo, Karla; Vellasco, Marley
Quantum-Inspired NAS With Attention-Based Search Spaces in Medical Applications Proceedings Article
In: 2025 IEEE Symposium on Computational Intelligence in Health and Medicine Companion (CIHM Companion), pp. 1-5, 2025.
@inproceedings{11002695,
title = {Quantum-Inspired NAS With Attention-Based Search Spaces in Medical Applications},
author = {Diego Páez Ardila and Thiago Carvalho and Santiago Vasquez Saavedra and Cesar Valencia Niño and Karla Figueiredo and Marley Vellasco},
doi = {10.1109/CIHMCompanion65205.2025.11002695},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {2025 IEEE Symposium on Computational Intelligence in Health and Medicine Companion (CIHM Companion)},
pages = {1-5},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Cao, Bin; Deng, Huanyu; Hao, Yiming; Luo, Xiao
Multi-view information fusion based on federated multi-objective neural architecture search for MRI semantic segmentation Journal Article
In: Information Fusion, vol. 123, pp. 103301, 2025, ISSN: 1566-2535.
@article{CAO2025103301,
title = {Multi-view information fusion based on federated multi-objective neural architecture search for MRI semantic segmentation},
author = {Bin Cao and Huanyu Deng and Yiming Hao and Xiao Luo},
url = {https://www.sciencedirect.com/science/article/pii/S1566253525003744},
doi = {10.1016/j.inffus.2025.103301},
issn = {1566-2535},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Information Fusion},
volume = {123},
pages = {103301},
abstract = {With the rapid development of artificial intelligence, medical image semantic segmentation is being used more widely. However, centralized training can lead to privacy risks. At the same time, MRI provides multiple views that together describe the anatomical structure of a lesion, but a single view may not fully capture all features. Therefore, integrating multi-view information in a federated learning setting is a key challenge for improving model generalization. This study combines federated learning, neural architecture search (NAS) and data fusion techniques to address issues related to data privacy, cross-institutional data distribution differences and multi-view information fusion in medical imaging. To achieve this, we propose the FL-MONAS framework, which leverages the advantages of NAS and federated learning. It uses a Pareto-frontier-based multi-objective optimization strategy to effectively combine 2D MRI with 3D anatomical structures, improving model performance while ensuring data privacy. Experimental results show that FL-MONAS maintains strong segmentation performance even in non-IID scenarios, providing an efficient and privacy-friendly solution for medical image analysis.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Chabal, Daphnee; Muller, Tim; Zhang, Eloise; Sapra, Dolly; Laat, Cees; Mann, Zoltán Ádám
COLIBRI: Optimizing Multi-party Secure Neural Network Inference Time for Transformers Proceedings Article
In: Zlatolas, Lili Nemec; Rannenberg, Kai; Welzer, Tatjana; Garcia-Alfaro, Joaquin (Ed.): ICT Systems Security and Privacy Protection, pp. 17–31, Springer Nature Switzerland, Cham, 2025, ISBN: 978-3-031-92882-6.
@inproceedings{10.1007/978-3-031-92882-6_2,
title = {COLIBRI: Optimizing Multi-party Secure Neural Network Inference Time for Transformers},
author = {Daphnee Chabal and Tim Muller and Eloise Zhang and Dolly Sapra and Cees Laat and Zoltán Ádám Mann},
editor = {Lili Nemec Zlatolas and Kai Rannenberg and Tatjana Welzer and Joaquin Garcia-Alfaro},
isbn = {978-3-031-92882-6},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {ICT Systems Security and Privacy Protection},
pages = {17–31},
publisher = {Springer Nature Switzerland},
address = {Cham},
abstract = {Secure Neural Network Inference (SNNI) protocols enable privacy-preserving inference by ensuring the confidentiality of inputs, model weights, and outputs. However, large neural networks, particularly Transformers, face significant challenges in SNNI due to high computational costs and slow execution, as these networks are typically optimized for accuracy rather than secure inference speed. We present COLIBRI, a novel approach that optimizes neural networks for efficient SNNI using Neural Architecture Search (NAS). Unlike prior methods, COLIBRI directly incorporates SNNI execution time as an optimization objective, leveraging a prediction model to estimate execution time without repeatedly running costly SNNI protocols during NAS. Our results on Cityscapes, a complex image segmentation task, show that COLIBRI reduces SNNI execution time by 26–33% while maintaining accuracy, marking a significant advancement in secure AI deployment.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Rumiantsev, Pavel; Coates, Mark
Half Search Space is All You Need Technical Report
2025.
@techreport{rumiantsev2025halfsearchspaceneed,
title = {Half Search Space is All You Need},
author = {Pavel Rumiantsev and Mark Coates},
url = {https://arxiv.org/abs/2505.13586},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Trirat, Patara; Lee, Jae-Gil
MONAQ: Multi-Objective Neural Architecture Querying for Time-Series Analysis on Resource-Constrained Devices Technical Report
2025.
@techreport{trirat2025monaqmultiobjectiveneuralarchitecture,
title = {MONAQ: Multi-Objective Neural Architecture Querying for Time-Series Analysis on Resource-Constrained Devices},
author = {Patara Trirat and Jae-Gil Lee},
url = {https://arxiv.org/abs/2505.10607},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Hoang, Trong-Minh; Pham, Tuan-Anh; Nguyen, Van-Nhan; Doan, Duc-Thang; Dao, Nhu-Ngoc
Leveraging Edge Intelligence for Solar Energy Management in Smart Grids Journal Article
In: IEEE Access, vol. 13, pp. 88093-88104, 2025.
@article{11005531,
title = {Leveraging Edge Intelligence for Solar Energy Management in Smart Grids},
author = {Trong-Minh Hoang and Tuan-Anh Pham and van-Nhan Nguyen and Duc-Thang Doan and Nhu-Ngoc Dao},
url = {https://ieeexplore.ieee.org/document/11005531},
doi = {10.1109/ACCESS.2025.3570595},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {IEEE Access},
volume = {13},
pages = {88093-88104},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gambella, Matteo; Solar, Vicente Javier Castro; Roveri, Manuel
SEAL: Searching Expandable Architectures for Incremental Learning Technical Report
2025.
@techreport{gambella2025sealsearchingexpandablearchitectures,
title = {SEAL: Searching Expandable Architectures for Incremental Learning},
author = {Matteo Gambella and Vicente Javier Castro Solar and Manuel Roveri},
url = {https://arxiv.org/abs/2505.10457},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Chen, Samuel Yen-Chi; Liu, Chen-Yu; Chen, Kuan-Cheng; Huang, Wei-Jia; Chang, Yen-Jui; Huang, Wei-Hao
Differentiable Quantum Architecture Search in Quantum-Enhanced Neural Network Parameter Generation Technical Report
2025.
@techreport{chen2025differentiablequantumarchitecturesearch,
title = {Differentiable Quantum Architecture Search in Quantum-Enhanced Neural Network Parameter Generation},
author = {Samuel Yen-Chi Chen and Chen-Yu Liu and Kuan-Cheng Chen and Wei-Jia Huang and Yen-Jui Chang and Wei-Hao Huang},
url = {https://arxiv.org/abs/2505.09653},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wang, Yancheng; Jojic, Nebojsa; Yang, Yingzhen
Differentiable Channel Selection in Self-Attention For Person Re-Identification Technical Report
2025.
@techreport{wang2025differentiablechannelselectionselfattention,
title = {Differentiable Channel Selection in Self-Attention For Person Re-Identification},
author = {Yancheng Wang and Nebojsa Jojic and Yingzhen Yang},
url = {https://arxiv.org/abs/2505.08961},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Deepa, S.; Parthiban, S.; Angel, S.; Divyalakshmi, M.
Deep Analysis and Detection of Skin Disease using YOLO-NAS Algorithm Proceedings Article
In: 2025 5th International Conference on Trends in Material Science and Inventive Materials (ICTMIM), pp. 1626-1631, 2025.
@inproceedings{10987944,
title = {Deep Analysis and Detection of Skin Disease using YOLO-NAS Algorithm},
author = {S. Deepa and Parthiban S and Angel S and Divyalakshmi M},
url = {https://ieeexplore.ieee.org/abstract/document/10987944},
doi = {10.1109/ICTMIM65579.2025.10987944},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {2025 5th International Conference on Trends in Material Science and Inventive Materials (ICTMIM)},
pages = {1626-1631},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Mecharbat, Lotfi Abdelkrim; Almakky, Ibrahim; Takac, Martin; Yaqub, Mohammad
MedNNS: Supernet-based Medical Task-Adaptive Neural Network Search Technical Report
2025.
@techreport{mecharbat2025mednnssupernetbasedmedicaltaskadaptiveb,
title = {MedNNS: Supernet-based Medical Task-Adaptive Neural Network Search},
author = {Lotfi Abdelkrim Mecharbat and Ibrahim Almakky and Martin Takac and Mohammad Yaqub},
url = {https://arxiv.org/abs/2504.15865},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Poddenige, Deshani Geethika; Seneviratne, Sachith; Senanayake, Damith; Niranjan, Mahesan; Suganthan, PN; Halgamuge, Saman
Arch-LLM: Taming LLMs for Neural Architecture Generation via Unsupervised Discrete Representation Learning Technical Report
2025.
@techreport{poddenige2025archllmtamingllmsneuralb,
title = {Arch-LLM: Taming LLMs for Neural Architecture Generation via Unsupervised Discrete Representation Learning},
author = {Deshani Geethika Poddenige and Sachith Seneviratne and Damith Senanayake and Mahesan Niranjan and PN Suganthan and Saman Halgamuge},
url = {https://arxiv.org/abs/2503.22063},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Mejia, Felix; Henao, John García; Hernandez, Carlos Barrios; Riveill, Michel
Efficiency in the classification of chest X-ray images through generative parallelization of the Neural Architecture Search Journal Article
In: ACI Avances en Ciencias e Ingenierías, vol. 17, 2025.
@article{articlel,
title = {Efficiency in the classification of chest X-ray images through generative parallelization of the Neural Architecture Search},
author = {Felix Mejia and John García Henao and Carlos Barrios Hernandez and Michel Riveill},
url = {https://www.researchgate.net/publication/391791615_Efficiency_in_the_classification_of_chest_X-ray_images_through_generative_parallelization_of_the_Neural_Architecture_SearchEficacia_en_la_clasificacion_de_imagenes_de_rayos_Xde_torax_mediante_la_paral},
doi = {10.18272/9bp4dz26},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {ACI Avances en Ciencias e Ingenierías},
volume = {17},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Cao, Bin; Luo, Xiao; Liu, Xin; Li, Yun
Comprehensive-Forecast Multiobjective Genetic Programming for Neural Architecture Search Journal Article
In: IEEE Transactions on Evolutionary Computation, pp. 1-1, 2025.
@article{11003973,
title = {Comprehensive-Forecast Multiobjective Genetic Programming for Neural Architecture Search},
author = {Bin Cao and Xiao Luo and Xin Liu and Yun Li},
url = {https://ieeexplore.ieee.org/abstract/document/11003973},
doi = {10.1109/TEVC.2025.3570195},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
pages = {1-1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhai, Xiaotong; Li, Shu; Zhong, Guoqiang; Li, Tao; Zhang, Fuchang; Hedjam, Rachid
Generative neural architecture search Journal Article
In: Neurocomputing, vol. 642, pp. 130360, 2025, ISSN: 0925-2312.
@article{ZHAI2025130360,
title = {Generative neural architecture search},
author = {Xiaotong Zhai and Shu Li and Guoqiang Zhong and Tao Li and Fuchang Zhang and Rachid Hedjam},
url = {https://www.sciencedirect.com/science/article/pii/S092523122501032X},
doi = {10.1016/j.neucom.2025.130360},
issn = {0925-2312},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {Neurocomputing},
volume = {642},
pages = {130360},
abstract = {Neural architecture search (NAS) is an important approach for automatic neural architecture design and has been applied to many tasks, such as image classification and object detection. However, most of the conventional NAS algorithms mainly focus on reducing the prohibitive computational cost, while choosing commonly used reinforcement learning (RL), evolutionary algorithm (EA) or gradient-based methods as their search strategy. In this paper, we propose a novel search strategy for NAS, called Generative NAS (GNAS). Specifically, we assume that high-performing convolutional neural networks adhere to a latent distribution, and design a generator to learn this distribution for generating neural architectures. Furthermore, in order to update the generator for better learning the latent distribution, we use the policy gradient and the performance of the generated CNNs on the validation datasets as a reward signal. To evaluate GNAS, we have conducted extensive experiments on the CIFAR-10, SVHN, MNIST, Fashion-MNIST and ImageNet datasets. The results demonstrate the effectiveness of GNAS compared to previous NAS strategies.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wani, M. Arif; Sultan, Bisma; Ali, Sarwat; Sofi, Mukhtar Ahmad
Introduction to Deep Learning Applications Book Chapter
In: Advances in Deep Learning, Volume 2, pp. 1–14, Springer Nature Singapore, Singapore, 2025, ISBN: 978-981-96-3498-9.
@inbook{Wani2025c,
title = {Introduction to Deep Learning Applications},
author = {M. Arif Wani and Bisma Sultan and Sarwat Ali and Mukhtar Ahmad Sofi},
url = {https://doi.org/10.1007/978-981-96-3498-9_1},
doi = {10.1007/978-981-96-3498-9_1},
isbn = {978-981-96-3498-9},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {Advances in Deep Learning, Volume 2},
pages = {1–14},
publisher = {Springer Nature Singapore},
address = {Singapore},
abstract = {Deep learning has emerged as a cornerstone of modern Artificial Intelligence (AI), offering unparalleled performance across a variety of complex tasks. Its ability to automatically learn features from vast amounts of data has transformed industries ranging from computer vision to natural language processing. Among the numerous applications of deep learning, three areas have witnessed significant progress: Neural Architecture Search (NAS), steganography, and medical applications. This chapter explores application of deep learning architectures in these three areas.},
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Wani, M. Arif; Sultan, Bisma; Ali, Sarwat; Sofi, Mukhtar Ahmad
Gradient-Based Neural Architecture Search Book Chapter
In: Advances in Deep Learning, Volume 2, pp. 31–44, Springer Nature Singapore, Singapore, 2025, ISBN: 978-981-96-3498-9.
@inbook{Wani2025b,
title = {Gradient-Based Neural Architecture Search},
author = {M. Arif Wani and Bisma Sultan and Sarwat Ali and Mukhtar Ahmad Sofi},
url = {https://doi.org/10.1007/978-981-96-3498-9_3},
doi = {10.1007/978-981-96-3498-9_3},
isbn = {978-981-96-3498-9},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {Advances in Deep Learning, Volume 2},
pages = {31–44},
publisher = {Springer Nature Singapore},
address = {Singapore},
abstract = {The rapid advancement of deep learning has revolutionized numerous fields, from computer vision to natural language processing. However, the effectiveness of deep learning models heavily relies on the choice of their architectures. Traditionally, designing these architectures has been a labor-intensive and expert-driven process. To address this challenge, Gradient-based Neural Architecture Search (NAS) has emerged as a promising solution, aiming to automate the architecture design process.},
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Gu, XiaoTong; Tang, Shengyu; Cao, Yiming; Yu, Changdong
Underwater object detection in sonar imagery with detection transformer and Zero-shot neural architecture search Technical Report
2025.
@techreport{gu2025underwaterobjectdetectionsonar,
title = {Underwater object detection in sonar imagery with detection transformer and Zero-shot neural architecture search},
author = {XiaoTong Gu and Shengyu Tang and Yiming Cao and Changdong Yu},
url = {https://arxiv.org/abs/2505.06694},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Cerioli, Alessandro; Petrosino, Lorenzo; Sasso, Daniele; Laroche, Clément; Piechowiak, Tobias; Pezzarossa, Luca; Merone, Mario; Vollero, Luca; Sabatini, Anna
Efficient Detection of Microplastics on Edge Devices With Tailored Compiler for TinyML Applications Journal Article
In: IEEE Access, vol. 13, pp. 90970-90982, 2025.
@article{10990265,
title = {Efficient Detection of Microplastics on Edge Devices With Tailored Compiler for TinyML Applications},
author = {Alessandro Cerioli and Lorenzo Petrosino and Daniele Sasso and Clément Laroche and Tobias Piechowiak and Luca Pezzarossa and Mario Merone and Luca Vollero and Anna Sabatini},
url = {https://ieeexplore.ieee.org/document/10990265},
doi = {10.1109/ACCESS.2025.3567816},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {IEEE Access},
volume = {13},
pages = {90970-90982},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhu, Chenghong; Wu, Xian; Zhang, Hao-Kai; Wu, Sixuan; Li, Guangxi; Wang, Xin
Scalable Quantum Architecture Search via Landscape Analysis Technical Report
2025.
@techreport{zhu2025scalablequantumarchitecturesearch,
title = {Scalable Quantum Architecture Search via Landscape Analysis},
author = {Chenghong Zhu and Xian Wu and Hao-Kai Zhang and Sixuan Wu and Guangxi Li and Xin Wang},
url = {https://arxiv.org/abs/2505.05380},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wani, M. Arif; Sultan, Bisma; Ali, Sarwat; Sofi, Mukhtar Ahmad
Evolutionary Algorithm-Based Neural Architecture Search Book Chapter
In: Advances in Deep Learning, Volume 2, pp. 15–30, Springer Nature Singapore, Singapore, 2025, ISBN: 978-981-96-3498-9.
@inbook{Wani2025,
title = {Evolutionary Algorithm-Based Neural Architecture Search},
author = {M. Arif Wani and Bisma Sultan and Sarwat Ali and Mukhtar Ahmad Sofi},
url = {https://doi.org/10.1007/978-981-96-3498-9_2},
doi = {10.1007/978-981-96-3498-9_2},
isbn = {978-981-96-3498-9},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {Advances in Deep Learning, Volume 2},
pages = {15–30},
publisher = {Springer Nature Singapore},
address = {Singapore},
abstract = {The increasing demand for specialized neural network architectures that cater to specific tasks has given rise to automated methods for architecture design, alleviating the need for manual, labor-intensive processes. Neural Architecture Search (NAS) has emerged as a key solution, enabling the discovery of optimized neural networks without human intervention. Among the various approaches to NAS, Evolutionary Algorithm-based NAS has proven to be particularly effective due to its ability to efficiently navigate the vast search space of neural architectures by employing biologically inspired optimization techniques.},
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Guarrasi, Valerio; Mogensen, Klara; Tassinari, Sara; Qvarlander, Sara; Soda, Paolo
Timing Is Everything: Finding the Optimal Fusion Points in Multimodal Medical Imaging Technical Report
2025.
@techreport{guarrasi2025timingeverythingfindingoptimal,
title = {Timing Is Everything: Finding the Optimal Fusion Points in Multimodal Medical Imaging},
author = {Valerio Guarrasi and Klara Mogensen and Sara Tassinari and Sara Qvarlander and Paolo Soda},
url = {https://arxiv.org/abs/2505.02467},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}