@article{3035227,
  title     = "BAS-ADAM: An ADAM based approach to improve the performance of beetle antennae search optimizer",
  author    = "Khan, A. H. and Cao, X. and Li, S. and Katsikis, V. N. and Liao, L.",
  journal   = "IEEE/CAA Journal of Automatica Sinica",
  year      = "2020",
  volume    = "7",
  number    = "2",
  pages     = "461--471",
  publisher = "Institute of Electrical and Electronics Engineers, Inc. (IEEE)",
  issn      = "2329-9266, 2329-9274",
  doi       = "10.1109/JAS.2020.1003048",
  keywords  = "Functions; Particle swarm optimization (PSO); Benchmark problems; Convergence behaviors; Fast convergence rate; Gradient-based optimization algorithms; Gradient estimation; Non-convex objective functions; Nonconvex functions; Particle swarm optimizers; Iterative methods",
  abstract  = "In this paper, we propose an enhancement of the beetle antennae search (BAS) algorithm, called BAS-ADAM, that smooths the convergence behavior and avoids trapping in local minima for highly non-convex objective functions. We achieve this by adaptively adjusting the step size in each iteration using the adaptive moment estimation (ADAM) update rule. The proposed algorithm also increases the convergence rate in narrow valleys. A key feature of the ADAM update rule is the ability to adjust the step size for each dimension separately instead of using a single step size for all dimensions. Since ADAM is traditionally used with gradient-based optimization algorithms, we first propose a gradient estimation model that does not require differentiating the objective function. As a result, the algorithm demonstrates excellent performance and a fast convergence rate in searching for the optimum of non-convex functions. The efficiency of the proposed algorithm was tested on three different benchmark problems, including the training of a high-dimensional neural network. The performance is compared with the particle swarm optimizer (PSO) and the original BAS algorithm."
}