Lu, Zhichao; Cheng, Ran; Jin, Yaochu; Tan, Kay Chen; Deb, Kalyanmoy
Neural Architecture Search as Multiobjective Optimization Benchmarks: Problem Formulation and Performance Assessment Journal Article
In: IEEE Transactions on Evolutionary Computation, 2023, ISSN: 1941-0026.
@article{lu_neural_2022,
title = {Neural Architecture Search as Multiobjective Optimization Benchmarks: Problem Formulation and Performance Assessment},
author = {Zhichao Lu and Ran Cheng and Yaochu Jin and Kay Chen Tan and Kalyanmoy Deb},
url = {https://ieeexplore.ieee.org/abstract/document/10004638
https://github.com/EMI-Group/EvoXBench},
doi = {10.1109/TEVC.2022.3233364},
issn = {1941-0026},
year = {2023},
date = {2023-01-02},
urldate = {2023-01-02},
journal = {IEEE Transactions on Evolutionary Computation},
abstract = {The ongoing advancements in network architecture design have led to remarkable achievements in deep learning across various challenging computer vision tasks. Meanwhile, the development of neural architecture search (NAS) has provided promising approaches to automating the design of network architectures for lower prediction error. Recently, the emerging application scenarios of deep learning (e.g., autonomous driving) have raised higher demands for network architectures considering multiple design criteria: number of parameters/weights, number of floating-point operations, inference latency, among others. From an optimization point of view, the NAS tasks involving multiple design criteria are intrinsically multiobjective optimization problems; hence, it is reasonable to adopt evolutionary multiobjective optimization (EMO) algorithms for tackling them. Nonetheless, there is still a clear gap confining the related research along this pathway: on the one hand, there is a lack of a general problem formulation of NAS tasks from an optimization point of view; on the other hand, there are challenges in conducting benchmark assessments of EMO algorithms on NAS tasks. To bridge the gap: (i) we formulate NAS tasks into general multi-objective optimization problems and analyze the complex characteristics from an optimization point of view; (ii) we present an end-to-end pipeline, dubbed EvoXBench, to generate benchmark test problems for EMO algorithms to run efficiently -without the requirement of GPUs or Pytorch/Tensorflow; (iii) we instantiate two test suites comprehensively covering two datasets, seven search spaces, and three hardware devices, involving up to eight objectives. Based on the above, we validate the proposed test suites using six representative EMO algorithms and provide some empirical analyses. The code of EvoXBench is available at https://github.com/EMI-Group/EvoXBench.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
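For readers who want a feel for the kind of workflow EvoXBench enables, the sketch below shows the generic ask-and-evaluate pattern of a tabular/surrogate NAS benchmark: integer-encoded architectures are mapped to objective vectors without any GPU training, and an EMO algorithm (here just random search plus a non-dominated filter) works on top of that. All names in the sketch are illustrative placeholders, not the actual EvoXBench API; see the linked repository for the real interface.

```python
# Minimal sketch of the ask-evaluate pattern that a tabular/surrogate NAS benchmark
# such as EvoXBench enables (no GPU, no training). All names here are illustrative
# placeholders, NOT the actual EvoXBench API -- see https://github.com/EMI-Group/EvoXBench.
import numpy as np

rng = np.random.default_rng(0)
n_var, pop_size = 10, 20                     # encoding length and population size
lower, upper = 0, 4                          # each variable picks one of 5 candidate operations

def evaluate_batch(X):
    """Placeholder for a benchmark lookup: maps integer encodings to objective vectors
    (e.g., prediction error and model size), here faked with a cheap synthetic function."""
    err = np.abs(X - 2).mean(axis=1)         # pretend "error" is minimized at all-2 encodings
    size = X.sum(axis=1) / (n_var * upper)   # pretend "model size" grows with the encoding
    return np.stack([err, size], axis=1)

# Random search as the simplest possible stand-in for an EMO algorithm.
X = rng.integers(lower, upper + 1, size=(pop_size, n_var))
F = evaluate_batch(X)

# Keep the non-dominated subset (both objectives minimized).
nd = [i for i in range(pop_size)
      if not any((F[j] <= F[i]).all() and (F[j] < F[i]).any() for j in range(pop_size))]
print("non-dominated encodings:", X[nd])
print("their objective values:", F[nd])
```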
Lu, Zhichao; Cheng, Ran; Huang, Shihua; Zhang, Haoming; Qiu, Changxiao; Yang, Fan
Surrogate-assisted Multiobjective Neural Architecture Search for Real-time Semantic Segmentation Journal Article
In: IEEE Transactions on Artificial Intelligence, 2022, ISSN: 2691-4581.
@article{lu_surrogate-assisted_2022,
title = {Surrogate-assisted Multiobjective Neural Architecture Search for Real-time Semantic Segmentation},
author = {Zhichao Lu and Ran Cheng and Shihua Huang and Haoming Zhang and Changxiao Qiu and Fan Yang},
url = {https://ieeexplore.ieee.org/abstract/document/9916102},
doi = {10.1109/TAI.2022.3213532},
issn = {2691-4581},
year = {2022},
date = {2022-10-11},
urldate = {2022-10-11},
journal = {IEEE Transactions on Artificial Intelligence},
abstract = {The architectural advancements in deep neural networks have led to remarkable leap-forwards across a broad array of computer vision tasks. Instead of relying on human expertise, neural architecture search (NAS) has emerged as a promising avenue towards automating the design of architectures. While recent achievements on image classification have suggested opportunities, the promises of NAS have yet to be thoroughly assessed on more challenging tasks of semantic segmentation. The main challenges of applying NAS to semantic segmentation arise from two aspects: i) high-resolution images to be processed; ii) additional requirement of real-time inference speed (i.e. real-time semantic segmentation) for applications such as autonomous driving. To meet such challenges, we propose a surrogate-assisted multi-objective method in this paper. Through a series of customized prediction models, our method effectively transforms the original NAS task to an ordinary multi-objective optimization problem. Followed by a hierarchical pre-screening criterion for in-fill selection, our method progressively achieves a set of efficient architectures trading-off between segmentation accuracy and inference speed. Empirical evaluations on three benchmark datasets together with an application using Huawei Atlas 200 DK suggest that our method can identify architectures significantly outperforming existing state-of-the-art architectures designed both manually by human experts and automatically by other NAS methods. Code is available from https://github.com/mikelzc1990/nas-semantic-segmentation .},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
He, Cheng; Cheng, Ran; Li, Lianghao; Tan, Kay Chen; Jin, Yaochu
Large-scale Multiobjective Optimization via Reformulated Decision Variable Analysis Journal Article
In: IEEE Transactions on Evolutionary Computation, 2022, ISSN: 1941-0026.
@article{he_large-scale_2022,
title = {Large-scale Multiobjective Optimization via Reformulated Decision Variable Analysis},
author = {Cheng He and Ran Cheng and Lianghao Li and Kay Chen Tan and Yaochu Jin},
url = {https://ieeexplore.ieee.org/abstract/document/9914641},
doi = {10.1109/TEVC.2022.3213006},
issn = {1941-0026},
year = {2022},
date = {2022-10-10},
urldate = {2022-10-10},
journal = {IEEE Transactions on Evolutionary Computation},
abstract = {With the rising number of large-scale multiobjective optimization problems (LSMOPs) from academia and industries, some multiobjective evolutionary algorithms (MOEAs) with different decision variable handling strategies have been proposed. Decision variable analysis (DVA) is widely used in large-scale optimization, aiming at identifying the connection between each decision variable and the objectives, and grouping those interacting decision variables to reduce the complexity of LSMOPs. Despite their effectiveness, existing DVA techniques require the unbearable cost of function evaluations for solving LSMOPs. We propose a reformulation based approach for efficient DVA to address this deficiency. Then a large-scale MOEA is proposed based on reformulated DVA, namely LERD. Specifically, the DVA process is reformulated into an optimization problem with binary decision variables, aiming to approximate different grouping results. Afterwards, each group of decision variables is used for convergence-related or diversity-related optimization. The effectiveness and efficiency of the reformulation based DVA are validated by replacing the corresponding DVA techniques in two large-scale MOEAs. Experiments in comparison with six state-of-the-art large-scale MOEAs on LSMOPs with up to 2000 decision variables have shown the promising performance of LERD.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bai, Hui; Shen, Ruimin; Lin, Yue; Xu, Botian; Cheng, Ran
Lamarckian Platform: Pushing the Boundaries of Evolutionary Reinforcement Learning Towards Asynchronous Commercial Games Journal Article
In: IEEE Transactions on Games, 2022, ISSN: 2475-1510.
@article{bai_lamarckian_2022,
title = {Lamarckian Platform: Pushing the Boundaries of Evolutionary Reinforcement Learning Towards Asynchronous Commercial Games},
author = {Hui Bai and Ruimin Shen and Yue Lin and Botian Xu and Ran Cheng},
url = {https://ieeexplore.ieee.org/abstract/document/9897069
https://github.com/lamarckian/lamarckian},
doi = {10.1109/TG.2022.3208324},
issn = {2475-1510},
year = {2022},
date = {2022-09-21},
urldate = {2022-09-21},
journal = {IEEE Transactions on Games},
abstract = {Despite the emerging progress of integrating evolutionary computation into reinforcement learning, the absence of a high-performance platform endowing composability and massive parallelism causes non-trivial difficulties for research and applications related to asynchronous commercial games. Here we introduce Lamarckian – an open-source platform featuring support for evolutionary reinforcement learning scalable to distributed computing resources. To improve the training speed and data efficiency, Lamarckian adopts optimized communication methods and an asynchronous evolutionary reinforcement learning workflow. To meet the demand for an asynchronous interface by commercial games and various methods, Lamarckian tailors an asynchronous Markov Decision Process interface and designs an object-oriented software architecture with decoupled modules. In comparison with the state-of-the-art RLlib, we empirically demonstrate the unique advantages of Lamarckian on benchmark tests with up to 6000 CPU cores: i) both the sampling efficiency and training speed are doubled when running PPO on Google football game; ii) the training speed is 13 times faster when running PBT+PPO on Pong game. Moreover, we also present two use cases: i) how Lamarckian is applied to generating behavior-diverse game AI; ii) how Lamarckian is applied to game balancing tests for an asynchronous commercial game.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Huang, Shihua; He, Cheng; Cheng, Ran
SoloGAN: Multi-domain Multimodal Unpaired Image-to-Image Translation via a Single Generative Adversarial Network Journal Article
In: IEEE Transactions on Artificial Intelligence, 2022, ISSN: 2691-4581.
@article{huang2022multimodal,
title = {SoloGAN: Multi-domain Multimodal Unpaired Image-to-Image Translation via a Single Generative Adversarial Network},
author = {Shihua Huang and Cheng He and Ran Cheng},
url = {https://ieeexplore.ieee.org/abstract/document/9811405
https://github.com/EMI-Group/SoloGAN},
issn = {2691-4581},
year = {2022},
date = {2022-06-30},
urldate = {2022-06-30},
journal = {IEEE Transactions on Artificial Intelligence},
publisher = {IEEE},
abstract = {Despite significant advances in image-to-image (I2I) translation with generative adversarial networks (GANs), it remains challenging to effectively translate an image to a set of diverse images in multiple target domains using a pair of generators and discriminators. Existing multimodal I2I translation methods adopt multiple domain-specific content encoders for different domains, where each domain-specific content encoder is trained with images from the same domain only. Nevertheless, we argue that the content (domain-invariance) features should be learned from images among all of the domains. Consequently, each domain-specific content encoder of existing schemes fails to extract the domain-invariant features efficiently. To address this issue, we present a flexible and general SoloGAN model for efficient multimodal I2I translation among multiple domains with unpaired data. In contrast to existing methods, the SoloGAN algorithm uses a single projection discriminator with an additional auxiliary classifier and shares the encoder and generator for all domains. As such, the SoloGAN model can be trained effectively with images from all domains so that the domain-invariance content representation can be efficiently extracted. Qualitative and quantitative results over a wide range of datasets against several counterparts and variants of the SoloGAN model demonstrate the merits of the method, especially for challenging I2I translation tasks, i.e. tasks that involve extreme shape variations or need to keep the complex backgrounds unchanged after translations. Furthermore, we demonstrate the contribution of each component using ablation studies.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wang, Teng; Jiang, Wenhao; Lu, Zhichao; Zheng, Feng; Cheng, Ran; Yin, Chengguo; Luo, Ping
VLMixer: Unpaired Vision-Language Pre-training via Cross-Modal CutMix Conference
2022.
@conference{wang2022vlmixer,
title = {VLMixer: Unpaired Vision-Language Pre-training via Cross-Modal CutMix},
author = {Teng Wang and Wenhao Jiang and Zhichao Lu and Feng Zheng and Ran Cheng and Chengguo Yin and Ping Luo},
url = {https://arxiv.org/abs/2206.08919},
year = {2022},
date = {2022-06-17},
urldate = {2022-06-17},
journal = {arXiv preprint arXiv:2206.08919},
abstract = {Existing vision-language pre-training (VLP) methods primarily rely on paired image-text datasets, which are either annotated by enormous human labors, or crawled from the internet followed by elaborate data cleaning techniques. To reduce the dependency on well-aligned image-text pairs, it is promising to directly leverage the large-scale text-only and image-only corpora. This paper proposes a data augmentation method, namely cross-modal CutMix (CMC), for implicit cross-modal alignment learning in unpaired VLP. Specifically, CMC transforms natural sentences from the textual view into a multi-modal view, where visually-grounded words in a sentence are randomly replaced by diverse image patches with similar semantics. There are several appealing proprieties of the proposed CMC. First, it enhances the data diversity while keeping the semantic meaning intact for tackling problems where the aligned data are scarce; Second, by attaching cross-modal noise on uni-modal data, it guides models to learn token-level interactions across modalities for better denoising. Furthermore, we present a new unpaired VLP method, dubbed as VLMixer, that integrates CMC with contrastive learning to pull together the uni-modal and multi-modal views for better instance-level alignments among different modalities. Extensive experiments on five downstream tasks show that VLMixer could surpass previous state-of-the-art unpaired VLP methods.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
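The cross-modal CutMix (CMC) idea in this paper can be illustrated with a few lines of tensor code: some visually-grounded tokens of a sentence are swapped for image-patch embeddings with similar semantics, producing a multi-modal view of a text-only sample. The sketch below is a toy illustration with random tensors, not the authors' implementation; the semantic patch-retrieval step is stubbed by a crude cosine-similarity lookup.

```python
# Toy sketch of cross-modal CutMix: replace grounded tokens with image-patch embeddings.
import torch

torch.manual_seed(0)
seq_len, n_patches, dim = 12, 50, 64
token_emb = torch.randn(seq_len, dim)        # embeddings of one tokenized sentence
patch_bank = torch.randn(n_patches, dim)     # embeddings of candidate image patches
grounded = torch.tensor([2, 5, 9])           # indices of "visually-grounded" words (assumed given)

def cross_modal_cutmix(tokens, patches, grounded_idx, p=0.5):
    """Replace each grounded token with a retrieved patch embedding with probability p."""
    mixed = tokens.clone()
    for i in grounded_idx.tolist():
        if torch.rand(()) < p:
            # The paper retrieves patches with similar semantics; nearest patch by
            # cosine similarity is used here as a crude stand-in.
            sims = torch.nn.functional.cosine_similarity(tokens[i:i + 1], patches)
            mixed[i] = patches[sims.argmax()]
    return mixed

multimodal_view = cross_modal_cutmix(token_emb, patch_bank, grounded)
print("tokens replaced:", (multimodal_view != token_emb).any(dim=1).sum().item())
```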
Bai, Hui; Cheng, Ran; Yazdani, Danial; Tan, Kay Chen; Jin, Yaochu
Evolutionary Large-Scale Dynamic Optimization Using Bilevel Variable Grouping Journal Article
In: IEEE Transactions on Cybernetics, 2022, ISSN: 2168-2267.
@article{bai2022evolutionary,
title = {Evolutionary Large-Scale Dynamic Optimization Using Bilevel Variable Grouping},
author = {Hui Bai and Ran Cheng and Danial Yazdani and Kay Chen Tan and Yaochu Jin},
url = {https://ieeexplore.ieee.org/abstract/document/9772492},
issn = {2168-2267},
year = {2022},
date = {2022-05-11},
journal = {IEEE Transactions on Cybernetics},
publisher = {IEEE},
abstract = {Variable grouping provides an efficient approach to large-scale optimization, and multipopulation strategies are effective for both large-scale optimization and dynamic optimization. However, variable grouping is not well studied in large-scale dynamic optimization when cooperating with multipopulation strategies. Specifically, when the numbers/sizes of the variable subcomponents are large, the performance of the algorithms will be substantially degraded. To address this issue, we propose a bilevel variable grouping (BLVG)-based framework. First, the primary grouping applies a state-of-the-art variable grouping method based on variable interaction analysis to group the variables into subcomponents. Second, the secondary grouping further groups the subcomponents into variable cells, that is, combination variable cells and decomposition variable cells. We then tailor a multipopulation strategy to process the two types of variable cells efficiently in a cooperative coevolutionary (CC) way. As indicated by the empirical study on large-scale dynamic optimization problems (DOPs) of up to 300 dimensions, the proposed framework outperforms several state-of-the-art frameworks for large-scale dynamic optimization.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
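The bilevel variable grouping (BLVG) idea above can be pictured as two passes: a primary grouping (assumed to come from a variable-interaction analysis) followed by a secondary pass that reorganizes the subcomponents into decomposition cells (oversized groups split) and combination cells (small groups merged), so a multipopulation strategy can handle one cell per subpopulation. The sketch below only illustrates that reorganization; the cell-size threshold and the merging/splitting policy are illustrative choices, not the paper's exact rules.

```python
# Schematic secondary grouping of precomputed variable subcomponents into cells.

def secondary_grouping(primary_groups, cell_size=10):
    cells = []
    small_buffer = []                        # small subcomponents waiting to be combined
    for group in primary_groups:
        if len(group) > cell_size:
            # decomposition cells: split an oversized subcomponent into chunks
            cells.extend(group[i:i + cell_size] for i in range(0, len(group), cell_size))
        else:
            small_buffer.extend(group)
            if len(small_buffer) >= cell_size:
                # combination cells: merge small subcomponents until large enough
                cells.append(small_buffer[:cell_size])
                small_buffer = small_buffer[cell_size:]
    if small_buffer:
        cells.append(small_buffer)
    return cells

# Example: three interacting groups over 30 decision variables (from a primary analysis).
primary = [list(range(0, 18)), list(range(18, 22)), list(range(22, 30))]
for cell in secondary_grouping(primary, cell_size=10):
    print(cell)
```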
Huang, Changwu; Li, Lianghao; He, Cheng; Cheng, Ran; Yao, Xin
Adaptive Multiobjective Evolutionary Algorithm for Large-scale Transformer Ratio Error Estimation Journal Article
In: Memetic Computing, vol. 14, no. 2, pp. 237–251, 2022, ISSN: 1865-9292.
@article{huang2022adaptive,
title = {Adaptive Multiobjective Evolutionary Algorithm for Large-scale Transformer Ratio Error Estimation},
author = {Changwu Huang and Lianghao Li and Cheng He and Ran Cheng and Xin Yao},
url = {https://link.springer.com/article/10.1007/s12293-022-00368-7},
year = {2022},
date = {2022-05-04},
journal = {Memetic Computing},
volume = {14},
number = {2},
pages = {237--251},
publisher = {Springer},
abstract = {As a typical large-scale multiobjective optimization problem extracted from real-world applications, the voltage transformer ratio error estimation (TREE) problem is challenging for existing evolutionary algorithms (EAs). Due to the large number of decision variables in the problems, existing algorithms cannot solve TREE problems efficiently. Besides, most EAs may fail to balance the convergence enhancement and diversity maintenance, leading to the trap in local optima even at the early stage of the evolution. This work proposes an adaptive large-scale multiobjective EA (LSMOEA) to handle the TREE problems with thousands of decision variables. Generally, multiple efficient offspring generation and environmental selection strategies selected from some representative LSMOEAs are included. Then an adaptive selection strategy is used to determine which offspring generation and environmental selection operators are used in each generation of the evolution. Thus, the search behavior of the proposed algorithm evolves along with the evolution process, the balance between convergence and diversity is maintained, and the proposed algorithm is expected to solve TREE problems effectively and efficiently. Experimental results show that the proposed algorithm achieves significant performance improvement due to the adaptive selection of different operators, providing an effective and efficient approach for large-scale optimization problems.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
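The adaptive selection of offspring-generation and environmental-selection operators described above follows the general pattern of adaptive operator selection: each generation, one strategy is drawn with probability proportional to its recent success. The sketch below uses a standard probability-matching rule with a placeholder reward; it illustrates the mechanism only and is not the paper's exact update rule.

```python
# Generic probability-matching sketch of adaptive operator selection.
import numpy as np

rng = np.random.default_rng(1)
operators = ["GA-style crossover", "DE-style mutation", "CSO-style update"]
quality = np.ones(len(operators))            # running quality estimate per operator
p_min, decay = 0.05, 0.8

for generation in range(20):
    probs = p_min + (1 - len(operators) * p_min) * quality / quality.sum()
    k = rng.choice(len(operators), p=probs)
    # Placeholder reward: in practice, the fraction of generated offspring that
    # survive environmental selection (or a hypervolume gain) would be used.
    reward = rng.random()
    quality[k] = decay * quality[k] + (1 - decay) * reward

print({op: round(float(p), 3) for op, p in zip(operators, probs)})
```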
Yuan, Zhongju; Li, Genghui; Wang, Zhenkun; Sun, Jianyong; Cheng, Ran
RL-CSL: A Combinatorial Optimization Method Using Reinforcement Learning and Contrastive Self-Supervised Learning Journal Article
In: IEEE Transactions on Emerging Topics in Computational Intelligence, 2022, ISSN: 2471-285X.
@article{yuan2022rl,
title = {RL-CSL: A Combinatorial Optimization Method Using Reinforcement Learning and Contrastive Self-Supervised Learning},
author = {Zhongju Yuan and Genghui Li and Zhenkun Wang and Jianyong Sun and Ran Cheng},
url = {https://ieeexplore.ieee.org/document/9690950},
issn = {2471-285X},
year = {2022},
date = {2022-02-25},
journal = {IEEE Transactions on Emerging Topics in Computational Intelligence},
publisher = {IEEE},
abstract = {Reinforcement learning-based methods have shown great potential in solving combinatorial optimization problems. However, the related research has not been mature in terms of both models and training methods. This paper proposes a method based on reinforcement learning and contrastive self-supervised learning. To be specific, the proposed method uses an attention model to learn a policy for generating solutions and combines a contrastive self-supervised learning model to learn the attention encoder in the way of node-by-node. Correspondingly, a two-phase learning method, including node-wise learning and solution-wise learning, is adopted to train the attention model and the contrastive self-supervised model jointly and collaboratively. The performance of the proposed method has been verified by numerical experiments on various combinatorial optimization problems.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Cao, Lei; Ye, Chun-ming; Cheng, Ran; Wang, Zhen-kun
Memory-based Variable Neighborhood Search for Green Vehicle Routing Problem with Passing-by Drivers: A Comprehensive Perspective Journal Article
In: Complex & Intelligent Systems, pp. 1–19, 2022.
@article{cao2022memory,
title = {Memory-based Variable Neighborhood Search for Green Vehicle Routing Problem with Passing-by Drivers: A Comprehensive Perspective},
author = {Lei Cao and Chun-ming Ye and Ran Cheng and Zhen-kun Wang},
url = {https://link.springer.com/article/10.1007/s40747-022-00661-5},
year = {2022},
date = {2022-02-08},
journal = {Complex & Intelligent Systems},
pages = {1--19},
publisher = {Springer},
abstract = {A business delivery model with professional vehicles as well as occasional passing-by vehicles is investigated in this paper. The drivers deliver parcels from the distribution center to customers and the passing-by driver can get a certain amount of compensation in return. To give a satisfactory solution from the perspective of platform owner, customers, professional drivers, occasional drivers, and authority, a multi-layer comprehensive model is proposed. To effectively solve the proposed model, we introduce an improved variable neighborhood search (VNS) with a memory-based restart mechanism. The new algorithm is evaluated on instances derived from Solomon’s benchmark and real-life beer delivery instances. Taguchi experiment is used to tune parameters in the proposed VNS, followed by component analysis and real-life experiments. Experimental results indicate that the proposed strategies are effective and the new delivery model in this paper has some advantages over traditional and single-delivery ones from the comprehensive perspectives of stakeholders in the crowdsourcing logistics system.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
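The algorithmic skeleton behind the improved variable neighborhood search (VNS) with a memory-based restart can be written compactly: shake within increasingly large neighborhoods, apply local search, and occasionally restart from a remembered elite solution. The sketch below uses a toy permutation objective and generic swap neighborhoods, not the routing model, operators, or exact restart rule of the paper.

```python
# Compact VNS skeleton with a memory-based restart (toy objective and neighborhoods).
import random

random.seed(0)

def cost(perm):                               # toy objective: sort the permutation
    return sum(abs(v - i) for i, v in enumerate(perm))

def shake(perm, k):                           # neighborhood k: apply k random swaps
    p = perm[:]
    for _ in range(k):
        i, j = random.sample(range(len(p)), 2)
        p[i], p[j] = p[j], p[i]
    return p

def local_search(perm):                       # first-improvement over adjacent swaps
    p, improved = perm[:], True
    while improved:
        improved = False
        for i in range(len(p) - 1):
            q = p[:]
            q[i], q[i + 1] = q[i + 1], q[i]
            if cost(q) < cost(p):
                p, improved = q, True
    return p

current = list(range(20))[::-1]
best = current[:]
memory = [current[:]]                         # elite memory used for restarts
for it in range(200):
    k = 1
    while k <= 3:
        candidate = local_search(shake(current, k))
        if cost(candidate) < cost(current):
            current, k = candidate, 1
            memory.append(current[:])
            if cost(current) < cost(best):
                best = current[:]
        else:
            k += 1
    if it % 50 == 49:                         # restart from a remembered elite solution
        current = random.choice(memory)[:]

print("best cost found:", cost(best))
```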
Hu, Shengran; Cheng, Ran; He, Cheng; Lu, Zhichao; Wang, Jing; Zhang, Miao
Accelerating Multi-Objective Neural Architecture Search by Random-Weight Evaluation Journal Article
In: Complex & Intelligent Systems, pp. 1–10, 2021.
@article{hu2021accelerating,
title = {Accelerating Multi-Objective Neural Architecture Search by Random-Weight Evaluation},
author = {Shengran Hu and Ran Cheng and Cheng He and Zhichao Lu and Jing Wang and Miao Zhang},
url = {https://link.springer.com/article/10.1007/s40747-021-00594-5},
year = {2021},
date = {2021-12-04},
journal = {Complex & Intelligent Systems},
pages = {1--10},
publisher = {Springer},
abstract = {For the goal of automated design of high-performance deep convolutional neural networks (CNNs), neural architecture search (NAS) methodology is becoming increasingly important for both academia and industries. Due to the costly stochastic gradient descent training of CNNs for performance evaluation, most existing NAS methods are computationally expensive for real-world deployments. To address this issue, we first introduce a new performance estimation metric, named random-weight evaluation (RWE) to quantify the quality of CNNs in a cost-efficient manner. Instead of fully training the entire CNN, the RWE only trains its last layer and leaves the remainders with randomly initialized weights, which results in a single network evaluation in seconds. Second, a complexity metric is adopted for multi-objective NAS to balance the model size and performance. Overall, our proposed method obtains a set of efficient models with state-of-the-art performance in two real-world search spaces. Then the results obtained on the CIFAR-10 dataset are transferred to the ImageNet dataset to validate the practicality of the proposed algorithm. Moreover, ablation studies on NAS-Bench-301 datasets reveal the effectiveness of the proposed RWE in estimating the performance compared to existing methods.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
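The random-weight evaluation (RWE) metric introduced above is simple to express in code: a candidate architecture keeps its random initialization, only the final linear classifier is trained, and the resulting accuracy serves as a cheap performance proxy. The PyTorch sketch below is a minimal illustration assuming a tiny stand-in ConvNet and random tensors in place of a decoded architecture and a real dataset such as CIFAR-10.

```python
# Minimal sketch of Random-Weight Evaluation: train only the last linear layer.
import torch
import torch.nn as nn

torch.manual_seed(0)
backbone = nn.Sequential(                     # stands in for a decoded candidate architecture
    nn.Conv2d(3, 16, 3, padding=1), nn.ReLU(),
    nn.AdaptiveAvgPool2d(1), nn.Flatten(),
)
head = nn.Linear(16, 10)                      # only this layer is trained

for p in backbone.parameters():               # keep the randomly initialized weights frozen
    p.requires_grad_(False)

opt = torch.optim.SGD(head.parameters(), lr=0.1)
loss_fn = nn.CrossEntropyLoss()

x = torch.randn(256, 3, 32, 32)               # placeholder images
y = torch.randint(0, 10, (256,))              # placeholder labels

for step in range(50):                        # a few cheap steps instead of full SGD training
    opt.zero_grad()
    loss = loss_fn(head(backbone(x)), y)
    loss.backward()
    opt.step()

with torch.no_grad():
    proxy_acc = (head(backbone(x)).argmax(1) == y).float().mean().item()
print("RWE proxy accuracy:", round(proxy_acc, 3))
```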
Wang, Teng; Zhang, Ruimao; Lu, Zhichao; Zheng, Feng; Cheng, Ran; Luo, Ping
End-to-End Dense Video Captioning with Parallel Decoding Inproceedings
In: Proceedings of the IEEE/CVF International Conference on Computer Vision, pp. 6847–6857, 2021.
@inproceedings{wang2021end,
title = {End-to-End Dense Video Captioning with Parallel Decoding},
author = {Teng Wang and Ruimao Zhang and Zhichao Lu and Feng Zheng and Ran Cheng and Ping Luo},
url = {https://openaccess.thecvf.com/content/ICCV2021/html/Wang_End-to-End_Dense_Video_Captioning_With_Parallel_Decoding_ICCV_2021_paper.html},
year = {2021},
date = {2021-11-17},
booktitle = {Proceedings of the IEEE/CVF International Conference on Computer Vision},
pages = {6847--6857},
abstract = {Dense video captioning aims to generate multiple associated captions with their temporal locations from the video. Previous methods follow a sophisticated "localize-then-describe" scheme, which heavily relies on numerous hand-crafted components. In this paper, we proposed a simple yet effective framework for end-to-end dense video captioning with parallel decoding (PDVC), by formulating the dense caption generation as a set prediction task. In practice, through stacking a newly proposed event counter on the top of a transformer decoder, the PDVC precisely segments the video into a number of event pieces under the holistic understanding of the video content, which effectively increases the coherence and readability of predicted captions. Compared with prior arts, the PDVC has several appealing advantages: (1) Without relying on heuristic non-maximum suppression or a recurrent event sequence selection network to remove redundancy, PDVC directly produces an event set with an appropriate size; (2) In contrast to adopting the two-stage scheme, we feed the enhanced representations of event queries into the localization head and caption head in parallel, making these two sub-tasks deeply interrelated and mutually promoted through the optimization; (3) Without bells and whistles, extensive experiments on ActivityNet Captions and YouCook2 show that PDVC is capable of producing high-quality captioning results, surpassing the state-of-the-art two-stage methods when its localization accuracy is on par with them. Code is available at https://github.com/ttengwang/PDVC.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Huang, Shihua; Lu, Zhichao; Cheng, Ran; He, Cheng
FaPN: Feature-aligned Pyramid Network for Dense Image Prediction Inproceedings
In: 2021 IEEE/CVF International Conference on Computer Vision (ICCV), pp. 844-853, 2021.
@inproceedings{9709978,
title = {FaPN: Feature-aligned Pyramid Network for Dense Image Prediction},
author = {Shihua Huang and Zhichao Lu and Ran Cheng and Cheng He},
url = {https://ieeexplore.ieee.org/document/9709978},
doi = {10.1109/ICCV48922.2021.00090},
year = {2021},
date = {2021-10-10},
booktitle = {2021 IEEE/CVF International Conference on Computer Vision (ICCV)},
pages = {844-853},
abstract = {Recent advancements in deep neural networks have made remarkable leap-forwards in dense image prediction. However, the issue of feature alignment remains as neglected by most existing approaches for simplicity. Direct pixel addition between upsampled and local features leads to feature maps with misaligned contexts that, in turn, translate to mis-classifications in prediction, especially on object boundaries. In this paper, we propose a feature alignment module that learns transformation offsets of pixels to contextually align upsampled higher-level features; and another feature selection module to emphasize the lower-level features with rich spatial details. We then integrate these two modules in a top-down pyramidal architecture and present the Feature-aligned Pyramid Network (FaPN). Extensive experimental evaluations on four dense prediction tasks and four datasets have demonstrated the efficacy of FaPN, yielding an overall improvement of 1.2 - 2.6 points in AP / mIoU over FPN when paired with Faster / Mask R-CNN. In particular, our FaPN achieves the state-of-the-art of 56.7% mIoU on ADE20K when integrated within Mask-Former. The code is available from https://github.com/EMI-Group/FaPN.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Chen, Yiming; Zhang, Yan; Zhang, Chen; Lee, Grandee; Cheng, Ran; Li, Haizhou
Revisiting Self-Training for Few-Shot Learning of Language Model Conference
2021.
@conference{chen2021revisiting,
title = {Revisiting Self-Training for Few-Shot Learning of Language Model},
author = {Yiming Chen and Yan Zhang and Chen Zhang and Grandee Lee and Ran Cheng and Haizhou Li},
url = {https://arxiv.org/abs/2110.01256},
year = {2021},
date = {2021-10-04},
journal = {arXiv preprint arXiv:2110.01256},
abstract = {As unlabeled data carry rich task-relevant information, they are proven useful for few-shot learning of language model. The question is how to effectively make use of such data. In this work, we revisit the self-training technique for language model fine-tuning and present a state-of-the-art prompt-based few-shot learner, SFLM. Given two views of a text sample via weak and strong augmentation techniques, SFLM generates a pseudo label on the weakly augmented version. Then, the model predicts the same pseudo label when fine-tuned with the strongly augmented version. This simple approach is shown to outperform other state-of-the-art supervised and semi-supervised counterparts on six sentence classification and six sentence-pair classification benchmarking tasks. In addition, SFLM only relies on a few in-domain unlabeled data. We conduct a comprehensive analysis to demonstrate the robustness of our proposed approach under various settings, including augmentation techniques, model scale, and few-shot knowledge transfer across tasks.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
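The core self-training step of SFLM described above follows a pseudo-labeling pattern: the model's confident prediction on a weakly augmented view becomes the target for the strongly augmented view. The sketch below captures that loss with plain logits over random features; it is a schematic stand-in, not the prompt-based language-model fine-tuning pipeline of the paper.

```python
# Schematic pseudo-labeling loss: weak view provides targets for the strong view.
import torch
import torch.nn.functional as F

torch.manual_seed(0)
model = torch.nn.Linear(32, 4)                         # stand-in classifier over 4 labels
weak_view = torch.randn(8, 32)                         # features of weakly augmented samples
strong_view = weak_view + 0.3 * torch.randn(8, 32)     # features of strongly augmented samples
threshold = 0.7

with torch.no_grad():
    probs = F.softmax(model(weak_view), dim=1)
    conf, pseudo = probs.max(dim=1)                    # pseudo labels from the weak view
    mask = conf >= threshold                           # keep only confident pseudo labels

logits_strong = model(strong_view)
if mask.any():
    loss = F.cross_entropy(logits_strong[mask], pseudo[mask])
else:
    loss = logits_strong.sum() * 0.0                   # no confident samples in this batch
print("self-training loss:", float(loss))
```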
Tan, Hao; Cheng, Ran; Huang, Shihua; He, Cheng; Qiu, Changxiao; Yang, Fan; Luo, Ping
RelativeNAS: Relative Neural Architecture Search via Slow-Fast Learning Journal Article
In: IEEE Transactions on Neural Networks and Learning Systems, 2021, ISSN: 2162-237X.
@article{tan2021relativenas,
title = {RelativeNAS: Relative Neural Architecture Search via Slow-Fast Learning},
author = {Hao Tan and Ran Cheng and Shihua Huang and Cheng He and Changxiao Qiu and Fan Yang and Ping Luo},
url = {https://ieeexplore.ieee.org/abstract/document/9488309},
issn = {2162-237X},
year = {2021},
date = {2021-07-16},
journal = {IEEE Transactions on Neural Networks and Learning Systems},
publisher = {IEEE},
abstract = {Despite the remarkable successes of convolutional neural networks (CNNs) in computer vision, it is time-consuming and error-prone to manually design a CNN. Among various neural architecture search (NAS) methods that are motivated to automate designs of high-performance CNNs, the differentiable NAS and population-based NAS are attracting increasing interests due to their unique characters. To benefit from the merits while overcoming the deficiencies of both, this work proposes a novel NAS method, RelativeNAS. As the key to efficient search, RelativeNAS performs joint learning between fast learners (i.e., decoded networks with relatively lower loss value) and slow learners in a pairwise manner. Moreover, since RelativeNAS only requires low-fidelity performance estimation to distinguish each pair of fast learner and slow learner, it saves certain computation costs for training the candidate architectures. The proposed RelativeNAS brings several unique advantages: 1) it achieves state-of-the-art performances on ImageNet with top-1 error rate of 24.88%, that is, outperforming DARTS and AmoebaNet-B by 1.82% and 1.12%, respectively; 2) it spends only 9 h with a single 1080Ti GPU to obtain the discovered cells, that is, 3.75x and 7875x faster than DARTS and AmoebaNet, respectively; and 3) it provides that the discovered cells obtained on CIFAR-10 can be directly transferred to object detection, semantic segmentation, and keypoint detection, yielding competitive results of 73.1% mAP on PASCAL VOC, 78.7% mIoU on Cityscapes, and 68.5% AP on MSCOCO, respectively. The implementation of RelativeNAS is available at https://github.com/EMI-Group/RelativeNAS.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
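The slow-fast learning behind RelativeNAS can be caricatured numerically: candidate architectures are encoded as continuous vectors, the population is paired, and in each pair the slow learner (higher proxy loss) moves toward the fast learner (lower proxy loss). The sketch below is only a numeric caricature of that pairing behaviour; the update rule and the proxy loss are simplified placeholders, not the paper's exact scheme.

```python
# Numeric caricature of pairwise slow-fast learning over continuous encodings.
import numpy as np

rng = np.random.default_rng(0)
pop = rng.random((10, 6))                     # 10 candidate encodings of length 6

def proxy_loss(x):                            # placeholder for low-fidelity performance estimation
    return float(np.sum((x - 0.3) ** 2))

for generation in range(30):
    order = rng.permutation(len(pop))
    for a, b in zip(order[::2], order[1::2]):
        fast, slow = (a, b) if proxy_loss(pop[a]) < proxy_loss(pop[b]) else (b, a)
        step = rng.random(pop.shape[1])       # random per-dimension step size in [0, 1)
        pop[slow] = np.clip(pop[slow] + step * (pop[fast] - pop[slow]), 0.0, 1.0)

best = min(pop, key=proxy_loss)
print("best proxy loss:", round(proxy_loss(best), 4))
```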
He, Cheng; Tan, Hao; Huang, Shihua; Cheng, Ran
Efficient Evolutionary Neural Architecture Search by Modular Inheritable Crossover Journal Article
In: Swarm and Evolutionary Computation, vol. 64, pp. 100894, 2021, ISSN: 2210-6502.
@article{he_efficient_2021,
title = {Efficient Evolutionary Neural Architecture Search by Modular Inheritable Crossover},
author = {Cheng He and Hao Tan and Shihua Huang and Ran Cheng},
url = {https://www.sciencedirect.com/science/article/pii/S2210650221000559},
doi = {10.1016/j.swevo.2021.100894},
issn = {2210-6502},
year = {2021},
date = {2021-01-01},
urldate = {2021-08-01},
journal = {Swarm and Evolutionary Computation},
volume = {64},
pages = {100894},
abstract = {Deep neural networks are widely used in the domain of image classification, and a large number of excellent deep neural networks have been proposed in recent years. However, hand-crafted neural networks often require human experts for elaborate designs, which can be time-consuming and error-prone. Hence, neural architecture search (NAS) methods have been proposed to design model architecture automatically. The evolutionary NAS methods have achieved encouraging results due to the global search capability of evolutionary algorithms. Nevertheless, most existing evolutionary NAS methods use only the mutation operator to generate offspring architectures. Consequently, the generated architectures could be pretty different from their parent architectures, failing to inherit the modular information to accelerate the convergence rate. We propose an efficient evolutionary method using a tailored crossover operator to address this deficiency, which enables the offspring architectures to inherit from their parent architectures. Moreover, we combine it with mutation operators under the framework of the evolutionary algorithm. Experimental results on both the CIFAR-10 and CIFAR-100 tasks show that our proposed evolutionary NAS method has achieved state-of-the-art results.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Lin, Jianqing; He, Cheng; Cheng, Ran
Dimension Dropout for Evolutionary High-Dimensional Expensive Multiobjective Optimization Inproceedings
In: Ishibuchi, Hisao; Zhang, Qingfu; Cheng, Ran; Li, Ke; Li, Hui; Wang, Handing; Zhou, Aimin (Ed.): Evolutionary Multi-Criterion Optimization, pp. 567–579, Springer International Publishing, Cham, 2021, ISBN: 978-3-030-72062-9.
@inproceedings{lin_dimension_2021,
title = {Dimension Dropout for Evolutionary High-Dimensional Expensive Multiobjective Optimization},
author = {Jianqing Lin and Cheng He and Ran Cheng},
editor = {Hisao Ishibuchi and Qingfu Zhang and Ran Cheng and Ke Li and Hui Li and Handing Wang and Aimin Zhou},
doi = {10.1007/978-3-030-72062-9_45},
isbn = {978-3-030-72062-9},
year = {2021},
date = {2021-01-01},
booktitle = {Evolutionary Multi-Criterion Optimization},
pages = {567--579},
publisher = {Springer International Publishing},
address = {Cham},
series = {Lecture Notes in Computer Science},
abstract = {In the past decades, a number of surrogate-assisted evolutionary algorithms (SAEAs) have been developed to solve expensive multiobjective optimization problems (EMOPs). However, most existing SAEAs focus on low-dimensional optimization problems, since a large number of training samples are required (which is unrealistic for EMOPs) to build an accurate surrogate model for high-dimensional problems. In this paper, an SAEA with Dimension Dropout is proposed to solve high-dimensional EMOPs. At each iteration of the proposed algorithm, it randomly selects a part of the decision variables by Dimension Dropout, and then optimizes the selected decision variables with the assistance of surrogate models. To balance the convergence and diversity, those candidate solutions with good diversity are modified by replacing the selected decision variables with those optimized ones (i.e., decision variables from some better-converged candidate solutions). Eventually, the new candidate solutions are evaluated using expensive functions to update the archive. Empirical studies on ten benchmark problems with up to 200 decision variables demonstrate the competitiveness of the proposed algorithm.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
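One iteration of the Dimension Dropout step described above is easy to sketch: a random subset of decision variables is selected, only those dimensions are optimized, and the result is pasted back into full-length solutions before expensive evaluation. In the sketch below, the surrogate-assisted sub-search is stubbed by a random perturbation and the expensive objectives by a synthetic bi-objective function.

```python
# One Dimension Dropout iteration: select, sub-optimize, extend back, evaluate.
import numpy as np

rng = np.random.default_rng(0)
n_var, pop_size, drop_ratio = 200, 10, 0.1

def expensive_evaluate(X):                    # placeholder for the true expensive objectives
    return np.stack([np.sum(X ** 2, axis=1), np.sum((X - 1) ** 2, axis=1)], axis=1)

archive = rng.random((pop_size, n_var))
selected = rng.choice(n_var, size=int(drop_ratio * n_var), replace=False)

# "Optimize" only the selected dimensions (stub for the surrogate-assisted sub-search).
sub_opt = archive[:, selected] + 0.05 * rng.standard_normal((pop_size, selected.size))

candidates = archive.copy()
candidates[:, selected] = np.clip(sub_opt, 0.0, 1.0)   # extend back to full-length solutions
objs = expensive_evaluate(candidates)
print("selected dimensions:", np.sort(selected))
print("objective values of first candidate:", objs[0])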
He, Cheng; Cheng, Ran; Tian, Ye; Zhang, Xingyi; Tan, Kay Chen; Jin, Yaochu
Paired Offspring Generation for Constrained Large-Scale Multiobjective Optimization Journal Article
In: IEEE Transactions on Evolutionary Computation, vol. 25, no. 3, pp. 448–462, 2021, ISSN: 1941-0026.
@article{he_paired_2021,
title = {Paired Offspring Generation for Constrained Large-Scale Multiobjective Optimization},
author = {Cheng He and Ran Cheng and Ye Tian and Xingyi Zhang and Kay Chen Tan and Yaochu Jin},
doi = {10.1109/TEVC.2020.3047835},
issn = {1941-0026},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
volume = {25},
number = {3},
pages = {448--462},
abstract = {Constrained multiobjective optimization problems (CMOPs) widely exist in real-world applications, and they are challenging for conventional evolutionary algorithms (EAs) due to the existence of multiple constraints and objectives. When the number of objectives or decision variables is scaled up in CMOPs, the performance of EAs may degenerate dramatically and may fail to obtain any feasible solutions. To address this issue, we propose a paired offspring generation-based multiobjective EA for constrained large-scale optimization. The general idea is to emphasize the role of offspring generation in reproducing some promising feasible or useful infeasible offspring solutions. We first adopt a small set of reference vectors for constructing several subpopulations with a fixed number of neighborhood solutions. Then, a pairing strategy is adopted to determine some pairwise parent solutions for offspring generation. Consequently, the pairwise parent solutions, which could be infeasible, may guide the generation of well-converged solutions to cross the infeasible region(s) effectively. The proposed algorithm is evaluated on CMOPs with up to 1000 decision variables and ten objectives. Moreover, each component in the proposed algorithm is examined in terms of its effect on the overall algorithmic performance. Experimental results on a variety of existing and our tailored test problems demonstrate the effectiveness of the proposed algorithm in constrained large-scale multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Li, Lianghao; He, Cheng; Cheng, Ran; Pan, Linqiang
Large-scale Multiobjective Optimization via Problem Decomposition and Reformulation Inproceedings
In: 2021 IEEE Congress on Evolutionary Computation (CEC), pp. 2149–2155, 2021.
@inproceedings{li_large-scale_2021,
title = {Large-scale Multiobjective Optimization via Problem Decomposition and Reformulation},
author = {Lianghao Li and Cheng He and Ran Cheng and Linqiang Pan},
doi = {10.1109/CEC45853.2021.9504820},
year = {2021},
date = {2021-01-01},
booktitle = {2021 IEEE Congress on Evolutionary Computation (CEC)},
pages = {2149--2155},
abstract = {Large-scale multiobjective optimization problems (LSMOPs) are challenging for existing approaches due to the complexity of objective functions and the massive volume of decision space. Some large-scale multiobjective evolutionary algorithms (LSMOEAs) have recently been proposed, which have shown their effectiveness in solving some benchmarks and real-world applications. They merely focus on handling the massive volume of decision space and ignore the complexity of LSMOPs in terms of objective functions. The complexity issue is also important since the complexity grows along with the increment in the number of decision variables. Our previous study proposed a framework to accelerate evolutionary large-scale multiobjective optimization via problem reformulation for handling large-scale decision variables. Here, we investigate the effectiveness of LSMOF combined with decomposition-based MOEA (MOEA/D), aiming to handle the complexity of LSMOPs in both the decision and objective spaces. Specifically, MOEA/D is embedded in LSMOF via two different strategies, and the proposed algorithm is tested on various benchmark LSMOPs. Experimental results indicate the encouraging performance improvement benefited from the solution of the complexity issue in large-scale multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wang, Jing; Li, Runze; He, Cheng; Chen, Haixin; Cheng, Ran; Zhai, Chen; Zhang, Miao
An Inverse Design Method for Supercritical Airfoil Based on Conditional Generative Models Journal Article
In: Chinese Journal of Aeronautics, 2021, ISSN: 1000-9361.
@article{wang_inverse_2021,
title = {An Inverse Design Method for Supercritical Airfoil Based on Conditional Generative Models},
author = {Jing Wang and Runze Li and Cheng He and Haixin Chen and Ran Cheng and Chen Zhai and Miao Zhang},
url = {https://www.sciencedirect.com/science/article/pii/S1000936121000662},
doi = {10.1016/j.cja.2021.03.006},
issn = {1000-9361},
year = {2021},
date = {2021-01-01},
urldate = {2021-08-01},
journal = {Chinese Journal of Aeronautics},
abstract = {Inverse design has long been an efficient and powerful design tool in the aircraft industry. In this paper, a novel inverse design method for supercritical airfoils is proposed based on generative models in deep learning. A Conditional Variational AutoEncoder (CVAE) and an integrated generative network CVAE-GAN that combines the CVAE with the Wasserstein Generative Adversarial Networks (WGAN), are conducted as generative models. They are used to generate target wall Mach distributions for the inverse design that matches specified features, such as locations of suction peak, shock and aft loading. Qualitative and quantitative results show that both adopted generative models can generate diverse and realistic wall Mach number distributions satisfying the given features. The CVAE-GAN model outperforms the CVAE model and achieves better reconstruction accuracies for all the samples in the dataset. Furthermore, a deep neural network for nonlinear mapping is adopted to obtain the airfoil shape corresponding to the target wall Mach number distribution. The performances of the designed deep neural network are fully demonstrated and a smoothness measurement is proposed to quantify small oscillations in the airfoil surface, proving the authenticity and accuracy of the generated airfoil shapes.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Yazdani, Danial; Cheng, Ran; Yazdani, Donya; Branke, Jürgen; Jin, Yaochu; Yao, Xin
A Survey of Evolutionary Continuous Dynamic Optimization Over Two Decades—Part B Journal Article
In: IEEE Transactions on Evolutionary Computation, vol. 25, no. 4, pp. 630–650, 2021, ISSN: 1941-0026.
@article{yazdani_survey_2021,
title = {A Survey of Evolutionary Continuous Dynamic Optimization Over Two Decades—Part B},
author = {Danial Yazdani and Ran Cheng and Donya Yazdani and Jürgen Branke and Yaochu Jin and Xin Yao},
doi = {10.1109/TEVC.2021.3060012},
issn = {1941-0026},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
volume = {25},
number = {4},
pages = {630--650},
abstract = {This article presents the second Part of a two-Part survey that reviews evolutionary dynamic optimization (EDO) for single-objective unconstrained continuous problems over the last two decades. While in the first part, we reviewed the components of dynamic optimization algorithms (DOAs); in this part, we present an in-depth review of the most commonly used benchmark problems, performance analysis methods, static optimization methods used in the framework of DOAs, and real-world applications. Compared to the previous works, this article provides a new taxonomy for the benchmark problems used in the field based on their baseline functions and dynamics. In addition, this survey classifies the commonly used performance indicators into fitness/error-based and efficiency-based ones. Different types of plots used in the literature for analyzing the performance and behavior of algorithms are also reviewed. Furthermore, the static optimization algorithms that are modified and utilized in the framework of DOAs as the optimization components are covered. We then comprehensively review some real-world dynamic problems that are optimized by EDO methods. Finally, some challenges and opportunities are pointed out for future directions.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
He, Cheng; Huang, Shihua; Cheng, Ran; Tan, Kay Chen; Jin, Yaochu
Evolutionary Multiobjective Optimization Driven by Generative Adversarial Networks (GANs) Journal Article
In: IEEE Transactions on Cybernetics, vol. 51, no. 6, pp. 3129–3142, 2021, ISSN: 2168-2275.
@article{he_evolutionary_2021,
title = {Evolutionary Multiobjective Optimization Driven by Generative Adversarial Networks (GANs)},
author = {Cheng He and Shihua Huang and Ran Cheng and Kay Chen Tan and Yaochu Jin},
doi = {10.1109/TCYB.2020.2985081},
issn = {2168-2275},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Cybernetics},
volume = {51},
number = {6},
pages = {3129--3142},
abstract = {Recently, increasing works have been proposed to drive evolutionary algorithms using machine-learning models. Usually, the performance of such model-based evolutionary algorithms is highly dependent on the training qualities of the adopted models. Since it usually requires a certain amount of data (i.e., the candidate solutions generated by the algorithms) for model training, the performance deteriorates rapidly with the increase of the problem scales due to the curse of dimensionality. To address this issue, we propose a multiobjective evolutionary algorithm driven by the generative adversarial networks (GANs). At each generation of the proposed algorithm, the parent solutions are first classified into real and fake samples to train the GANs; then the offspring solutions are sampled by the trained GANs. Thanks to the powerful generative ability of the GANs, our proposed algorithm is capable of generating promising offspring solutions in high-dimensional decision space with limited training data. The proposed algorithm is tested on ten benchmark problems with up to 200 decision variables. The experimental results on these test problems demonstrate the effectiveness of the proposed algorithm.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
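The GAN-driven offspring generation summarized above can be sketched compactly: parent solutions are split into "real" (better) and "fake" (worse) samples, a small GAN is trained on them, and new offspring are sampled from the generator. In the sketch below the split criterion (a simple scalarized fitness), the network sizes, and the single training loop are toy simplifications of the algorithm, not a faithful reimplementation.

```python
# Toy GAN-driven offspring sampling over a decision space in [0, 1]^n.
import torch
import torch.nn as nn

torch.manual_seed(0)
n_var, n_parents, z_dim = 30, 64, 8
parents = torch.rand(n_parents, n_var)                    # current parent population
fitness = parents.pow(2).sum(dim=1)                       # placeholder scalarized quality (smaller = better)
real = parents[fitness.argsort()[: n_parents // 2]]       # better half as "real" samples
fake_pool = parents[fitness.argsort()[n_parents // 2:]]   # worse half as "fake" samples

G = nn.Sequential(nn.Linear(z_dim, 32), nn.ReLU(), nn.Linear(32, n_var), nn.Sigmoid())
D = nn.Sequential(nn.Linear(n_var, 32), nn.ReLU(), nn.Linear(32, 1), nn.Sigmoid())
opt_g = torch.optim.Adam(G.parameters(), lr=1e-3)
opt_d = torch.optim.Adam(D.parameters(), lr=1e-3)
bce = nn.BCELoss()

for step in range(200):
    # Discriminator: real parents vs. worse parents and generated samples.
    gen = G(torch.randn(real.size(0), z_dim)).detach()
    d_loss = (bce(D(real), torch.ones(real.size(0), 1))
              + bce(D(fake_pool), torch.zeros(fake_pool.size(0), 1))
              + bce(D(gen), torch.zeros(gen.size(0), 1)))
    opt_d.zero_grad()
    d_loss.backward()
    opt_d.step()
    # Generator: produce samples the discriminator classifies as real.
    g_loss = bce(D(G(torch.randn(real.size(0), z_dim))), torch.ones(real.size(0), 1))
    opt_g.zero_grad()
    g_loss.backward()
    opt_g.step()

offspring = G(torch.randn(n_parents, z_dim)).detach()     # sampled offspring solutions
print("offspring shape:", tuple(offspring.shape))
```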
Yazdani, Danial; Cheng, Ran; Yazdani, Donya; Branke, Jürgen; Jin, Yaochu; Yao, Xin
A Survey of Evolutionary Continuous Dynamic Optimization Over Two Decades—Part A Journal Article
In: IEEE Transactions on Evolutionary Computation, vol. 25, no. 4, pp. 609–629, 2021, ISSN: 1941-0026.
@article{yazdani_survey_2021-1,
title = {A Survey of Evolutionary Continuous Dynamic Optimization Over Two Decades—Part A},
author = {Danial Yazdani and Ran Cheng and Donya Yazdani and Jürgen Branke and Yaochu Jin and Xin Yao},
doi = {10.1109/TEVC.2021.3060014},
issn = {1941-0026},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
volume = {25},
number = {4},
pages = {609--629},
abstract = {Many real-world optimization problems are dynamic. The field of dynamic optimization deals with such problems where the search space changes over time. In this two-part article, we present a comprehensive survey of the research in evolutionary dynamic optimization for single-objective unconstrained continuous problems over the last two decades. In Part A of this survey, we propose a new taxonomy for the components of dynamic optimization algorithms (DOAs), namely, convergence detection, change detection, explicit archiving, diversity control, and population division and management. In comparison to the existing taxonomies, the proposed taxonomy covers some additional important components, such as convergence detection and computational resource allocation. Moreover, we significantly expand and improve the classifications of diversity control and multipopulation methods, which are underrepresented in the existing taxonomies. We then provide detailed technical descriptions and analysis of different components according to the suggested taxonomy. Part B of this survey provides an in-depth analysis of the most commonly used benchmark problems, performance analysis methods, static optimization algorithms used as the optimization components in the DOAs, and dynamic real-world applications. Finally, several opportunities for future work are pointed out.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Tan, Hao; Cheng, Ran; Huang, Shihua; He, Cheng; Qiu, Changxiao; Yang, Fan; Luo, Ping
RelativeNAS: Relative Neural Architecture Search via Slow-Fast Learning Journal Article
In: IEEE Transactions on Neural Networks and Learning Systems, pp. 1–15, 2021, ISSN: 2162-2388.
@article{tan_relativenas_2021,
title = {RelativeNAS: Relative Neural Architecture Search via Slow-Fast Learning},
author = {Hao Tan and Ran Cheng and Shihua Huang and Cheng He and Changxiao Qiu and Fan Yang and Ping Luo},
doi = {10.1109/TNNLS.2021.3096658},
issn = {2162-2388},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Neural Networks and Learning Systems},
pages = {1--15},
abstract = {Despite the remarkable successes of convolutional neural networks (CNNs) in computer vision, it is time-consuming and error-prone to manually design a CNN. Among various neural architecture search (NAS) methods that are motivated to automate designs of high-performance CNNs, the differentiable NAS and population-based NAS are attracting increasing interests due to their unique characters. To benefit from the merits while overcoming the deficiencies of both, this work proposes a novel NAS method, RelativeNAS. As the key to efficient search, RelativeNAS performs joint learning between fast learners (i.e., decoded networks with relatively lower loss value) and slow learners in a pairwise manner. Moreover, since RelativeNAS only requires low-fidelity performance estimation to distinguish each pair of fast learner and slow learner, it saves certain computation costs for training the candidate architectures. The proposed RelativeNAS brings several unique advantages: 1) it achieves state-of-the-art performances on ImageNet with top-1 error rate of 24.88%, that is, outperforming DARTS and AmoebaNet-B by 1.82% and 1.12%, respectively; 2) it spends only 9 h with a single 1080Ti GPU to obtain the discovered cells, that is, 3.75x and 7875x faster than DARTS and AmoebaNet, respectively; and 3) it provides that the discovered cells obtained on CIFAR-10 can be directly transferred to object detection, semantic segmentation, and keypoint detection, yielding competitive results of 73.1% mAP on PASCAL VOC, 78.7% mIoU on Cityscapes, and 68.5% AP on MSCOCO, respectively. The implementation of RelativeNAS is available at https://github.com/EMI-Group/RelativeNAS.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Lin, Jianqing; He, Cheng; Cheng, Ran
Adaptive Dropout for High-dimensional Expensive Multiobjective Optimization Journal Article
In: Complex & Intelligent Systems, 2021, ISSN: 2198-6053.
@article{lin_adaptive_2021,
title = {Adaptive Dropout for High-dimensional Expensive Multiobjective Optimization},
author = {Jianqing Lin and Cheng He and Ran Cheng},
url = {https://doi.org/10.1007/s40747-021-00362-5},
doi = {10.1007/s40747-021-00362-5},
issn = {2198-6053},
year = {2021},
date = {2021-01-01},
urldate = {2021-08-26},
journal = {Complex & Intelligent Systems},
abstract = {Various works have been proposed to solve expensive multiobjective optimization problems (EMOPs) using surrogate-assisted evolutionary algorithms (SAEAs) in recent decades. However, most existing methods focus on EMOPs with less than 30 decision variables, since a large number of training samples are required to build an accurate surrogate model for high-dimensional EMOPs, which is unrealistic for expensive multiobjective optimization. To address this issue, we propose an SAEA with an adaptive dropout mechanism. Specifically, this mechanism takes advantage of the statistical differences between different solution sets in the decision space to guide the selection of some crucial decision variables. A new infill criterion is then proposed to optimize the selected decision variables with the assistance of surrogate models. Moreover, the optimized decision variables are extended to new full-length solutions, and then the new candidate solutions are evaluated using expensive functions to update the archive. The proposed algorithm is tested on different benchmark problems with up to 200 decision variables compared to some state-of-the-art SAEAs. The experimental results have demonstrated the promising performance and computational efficiency of the proposed algorithm in high-dimensional expensive multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hu, Shengran; Cheng, Ran; He, Cheng; Lu, Zhichao
Multi-objective Neural Architecture Search with Almost No Training Inproceedings
In: Ishibuchi, Hisao; Zhang, Qingfu; Cheng, Ran; Li, Ke; Li, Hui; Wang, Handing; Zhou, Aimin (Ed.): Evolutionary Multi-Criterion Optimization, pp. 492–503, Springer International Publishing, Cham, 2021, ISBN: 978-3-030-72062-9.
@inproceedings{hu_multi-objective_2021,
title = {Multi-objective Neural Architecture Search with Almost No Training},
author = {Shengran Hu and Ran Cheng and Cheng He and Zhichao Lu},
editor = {Hisao Ishibuchi and Qingfu Zhang and Ran Cheng and Ke Li and Hui Li and Handing Wang and Aimin Zhou},
doi = {10.1007/978-3-030-72062-9_39},
isbn = {978-3-030-72062-9},
year = {2021},
date = {2021-01-01},
booktitle = {Evolutionary Multi-Criterion Optimization},
pages = {492--503},
publisher = {Springer International Publishing},
address = {Cham},
series = {Lecture Notes in Computer Science},
abstract = {In the recent past, neural architecture search (NAS) has attracted increasing attention from both academia and industries. Despite the steady stream of impressive empirical results, most existing NAS algorithms are computationally prohibitive to execute due to the costly iterations of stochastic gradient descent (SGD) training. In this work, we propose an effective alternative, dubbed Random-Weight Evaluation (RWE), to rapidly estimate the performance of network architectures. By just training the last linear classification layer, RWE reduces the computational cost of evaluating an architecture from hours to seconds. When integrated within an evolutionary multi-objective algorithm, RWE obtains a set of efficient architectures with state-of-the-art performance on CIFAR-10 with less than two hours’ searching on a single GPU card. Ablation studies on rank-order correlations and transfer learning experiments to ImageNet have further validated the effectiveness of RWE.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
He, Cheng; Cheng, Ran
Population Sizing of Evolutionary Large-Scale Multiobjective Optimization Inproceedings
In: Ishibuchi, Hisao; Zhang, Qingfu; Cheng, Ran; Li, Ke; Li, Hui; Wang, Handing; Zhou, Aimin (Ed.): Evolutionary Multi-Criterion Optimization, pp. 41–52, Springer International Publishing, Cham, 2021, ISBN: 978-3-030-72062-9.
@inproceedings{he_population_2021,
title = {Population Sizing of Evolutionary Large-Scale Multiobjective Optimization},
author = {Cheng He and Ran Cheng},
editor = {Hisao Ishibuchi and Qingfu Zhang and Ran Cheng and Ke Li and Hui Li and Handing Wang and Aimin Zhou},
doi = {10.1007/978-3-030-72062-9_4},
isbn = {978-3-030-72062-9},
year = {2021},
date = {2021-01-01},
booktitle = {Evolutionary Multi-Criterion Optimization},
pages = {41--52},
publisher = {Springer International Publishing},
address = {Cham},
series = {Lecture Notes in Computer Science},
abstract = {Large-scale multiobjective optimization problems (LSMOPs) are emerging and widely existed in real-world applications, which involve a large number of decision variables and multiple conflicting objectives. Evolutionary algorithms (EAs) are naturally suitable for multiobjective optimization due to their population-based property, allowing the search of optima simultaneously. Nevertheless, LSMOPs are challenging for conventional EAs, mainly due to the huge volume of search space in LSMOPs. Thus, it is important to explore the impact of the population sizing on the performance of conventional multiobjective EAs (MOEAs) in solving LSMOPs. In this work, we compare several representative MOEAs with different settings of population sizes on some transformer ratio error estimation (TREE) problems in the power system. These test cases are defined on combinations of three population sizes, three TREE problems, and five MOEAs. Our results indicate that the performances of conventional MOEAs with different population sizes in solving LSMOPs are different. The impact of population sizing is most significant for differential evolution based and particle swarm based MOEAs.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Li, Lianghao; He, Cheng; Cheng, Ran; Pan, Linqiang
Manifold Learning Inspired Mating Restriction for Evolutionary Constrained Multiobjective Optimization Inproceedings
In: Ishibuchi, Hisao; Zhang, Qingfu; Cheng, Ran; Li, Ke; Li, Hui; Wang, Handing; Zhou, Aimin (Ed.): Evolutionary Multi-Criterion Optimization, pp. 296–307, Springer International Publishing, Cham, 2021, ISBN: 978-3-030-72062-9.
@inproceedings{li_manifold_2021,
title = {Manifold Learning Inspired Mating Restriction for Evolutionary Constrained Multiobjective Optimization},
author = {Lianghao Li and Cheng He and Ran Cheng and Linqiang Pan},
editor = {Hisao Ishibuchi and Qingfu Zhang and Ran Cheng and Ke Li and Hui Li and Handing Wang and Aimin Zhou},
doi = {10.1007/978-3-030-72062-9_24},
isbn = {978-3-030-72062-9},
year = {2021},
date = {2021-01-01},
booktitle = {Evolutionary Multi-Criterion Optimization},
pages = {296--307},
publisher = {Springer International Publishing},
address = {Cham},
series = {Lecture Notes in Computer Science},
abstract = {Mating restriction strategies are capable of restricting the distribution of parent solutions for effective offspring generation in evolutionary algorithms (EAs). Studies have shown the importance of these strategies in improving the performance of EAs for multiobjective optimization. Our previous study proposed a specific manifold learning inspired mating restriction (MLMR) strategy. It has shown promising capability of solving multiobjective optimization problems (MOPs) with complicated Pareto set shapes. However, the effect of mating restriction strategies in solving constrained MOPs is yet to be well studied. Here, we investigate the effectiveness of MLMR for solving constrained MOPs. The MLMR strategy is embedded into some representative multiobjective EAs and tested on various benchmark constrained MOPs. Experimental results indicate the encouraging performance of MLMR in constrained multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yazdani, Danial; Cheng, Ran; He, Cheng; Branke, Jürgen
Adaptive Control of Subpopulations in Evolutionary Dynamic Optimization Journal Article
In: IEEE Transactions on Cybernetics, vol. 52, no. 7, pp. 6476–6489, 2020, ISSN: 2168-2267.
@article{9284465,
title = {Adaptive Control of Subpopulations in Evolutionary Dynamic Optimization},
author = {Danial Yazdani and Ran Cheng and Cheng He and Jürgen Branke},
url = {https://ieeexplore.ieee.org/abstract/document/9284465},
doi = {10.1109/TCYB.2020.3036100},
issn = {2168-2267},
year = {2020},
date = {2020-12-07},
journal = {IEEE Transactions on Cybernetics},
volume = {52},
number = {7},
pages = {6476--6489},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Yazdani, Danial; Omidvar, Mohammad Nabi; Cheng, Ran; Branke, Jürgen; Nguyen, Trung Thanh; Yao, Xin
Benchmarking continuous dynamic optimization: Survey and generalized test suite Journal Article
In: IEEE Transactions on Cybernetics, 2020, ISSN: 2168-2267.
@article{yazdani2020benchmarking,
title = {Benchmarking continuous dynamic optimization: Survey and generalized test suite},
author = {Danial Yazdani and Mohammad Nabi Omidvar and Ran Cheng and Jürgen Branke and Trung Thanh Nguyen and Xin Yao},
url = {https://ieeexplore.ieee.org/abstract/document/9167424},
doi = {10.1109/TCYB.2020.3011828},
issn = {2168-2267},
year = {2020},
date = {2020-08-14},
journal = {IEEE Transactions on Cybernetics},
publisher = {IEEE},
abstract = {Dynamic changes are an important and inescapable aspect of many real-world optimization problems. Designing algorithms to find and track desirable solutions while facing challenges of dynamic optimization problems is an active research topic in the field of swarm and evolutionary computation. To evaluate and compare the performance of algorithms, it is imperative to use a suitable benchmark that generates problem instances with different controllable characteristics. In this article, we give a comprehensive review of existing benchmarks and investigate their shortcomings in capturing different problem features. We then propose a highly configurable benchmark suite, the generalized moving peaks benchmark, capable of generating problem instances whose components have a variety of properties, such as different levels of ill-conditioning, variable interactions, shape, and complexity. Moreover, components generated by the proposed benchmark can be highly dynamic with respect to the gradients, heights, optimum locations, condition numbers, shapes, complexities, and variable interactions. Finally, several well-known optimizers and dynamic optimization algorithms are chosen to solve generated problems by the proposed benchmark. The experimental results show the poor performance of the existing methods in facing new challenges posed by the addition of new properties.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
He, Cheng; Cheng, Ran; Yazdani, Danial
Adaptive Offspring Generation for Evolutionary Large-Scale Multiobjective Optimization Journal Article
In: IEEE Transactions on Systems, Man, and Cybernetics: Systems, pp. 786–798, 2020, ISSN: 2168-2216.
@article{he_adaptive_2020,
title = {Adaptive Offspring Generation for Evolutionary Large-Scale Multiobjective Optimization},
author = {Cheng He and Ran Cheng and Danial Yazdani},
url = {https://ieeexplore.ieee.org/abstract/document/9138459},
doi = {10.1109/TSMC.2020.3003926},
issn = {2168-2216},
year = {2020},
date = {2020-07-10},
journal = {IEEE Transactions on Systems, Man, and Cybernetics: Systems},
pages = {786--798},
abstract = {Offspring generation plays an important role in evolutionary multiobjective optimization. However, generating promising candidate solutions effectively in high-dimensional spaces is particularly challenging. To address this issue, we propose an adaptive offspring generation method for large-scale multiobjective optimization. First, a preselection strategy is proposed to select a balanced parent population, and then these parent solutions are used to construct direction vectors in the decision spaces for reproducing promising offspring solutions. Specifically, two kinds of direction vectors are adaptively used to generate offspring solutions. The first kind takes advantage of the dominated solutions to generate offspring solutions toward the Pareto optimal set (PS) for convergence enhancement, while the other kind uses those nondominated solutions to spread the solutions over the PS for diversity maintenance. The proposed offspring generation method can be embedded in many existing multiobjective evolutionary algorithms (EAs) for large-scale multiobjective optimization. Experiments are conducted to reveal the mechanism of our proposed adaptive reproduction strategy and validate its effectiveness. Experimental results on some large-scale multiobjective optimization problems have demonstrated the competitive performance of our proposed algorithm in comparison with five state-of-the-art large-scale EAs.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Chen, Yiming; Pan, Tianci; He, Cheng; Cheng, Ran
Efficient Evolutionary Deep Neural Architecture Search (NAS) by Noisy Network Morphism Mutation Inproceedings
In: Pan, Linqiang; Liang, Jing; Qu, Boyang (Ed.): Bio-inspired Computing: Theories and Applications, pp. 497–508, Springer, Singapore, 2020, ISBN: 9789811534157.
@inproceedings{chen_efficient_2020,
title = {Efficient Evolutionary Deep Neural Architecture Search (NAS) by Noisy Network Morphism Mutation},
author = {Yiming Chen and Tianci Pan and Cheng He and Ran Cheng},
editor = {Linqiang Pan and Jing Liang and Boyang Qu},
doi = {10.1007/978-981-15-3415-7_41},
isbn = {9789811534157},
year = {2020},
date = {2020-01-01},
booktitle = {Bio-inspired Computing: Theories and Applications},
pages = {497--508},
publisher = {Springer},
address = {Singapore},
series = {Communications in Computer and Information Science},
abstract = {Deep learning has achieved enormous breakthroughs in the field of image recognition. However, due to the time-consuming and error-prone process in discovering novel neural architecture, it remains a challenge for designing a specific network in handling a particular task. Hence, many automated neural architecture search methods are proposed to find suitable deep neural network architecture for a specific task without human experts. Nevertheless, these methods are still computationally/economically expensive, since they require a vast amount of computing resource and/or computational time. In this paper, we propose several network morphism mutation operators with extra noise, and further redesign the macro-architecture based on the classical network. The proposed methods are embedded in an evolutionary algorithm and tested on CIFAR-10 classification task. Experimental results indicate the capability of our proposed method in discovering powerful neural architecture which has achieved a classification error 2.55% with only 4.7M parameters on CIFAR-10 within 12 GPU-hours.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
He, Cheng; Cheng, Ran; Tian, Ye; Zhang, Xingyi
Iterated Problem Reformulation for Evolutionary Large-Scale Multiobjective Optimization Inproceedings
In: 2020 IEEE Congress on Evolutionary Computation (CEC), pp. 1–8, 2020.
@inproceedings{he_iterated_2020,
title = {Iterated Problem Reformulation for Evolutionary Large-Scale Multiobjective Optimization},
author = {Cheng He and Ran Cheng and Ye Tian and Xingyi Zhang},
doi = {10.1109/CEC48606.2020.9185553},
year = {2020},
date = {2020-01-01},
booktitle = {2020 IEEE Congress on Evolutionary Computation (CEC)},
pages = {1--8},
abstract = {Due to the curse of dimensionality, two main issues remain challenging for applying evolutionary algorithms (EAs) to large-scale multiobjective optimization. The first issue is how to improve the efficiency of EAs for reducing computation cost. The second one is how to improve the diversity maintenance of EAs to avoid local optima. Nevertheless, these two issues are somehow conflicting with each other, and thus it is crucial to strike a balance between them in practice. Thereby, we propose an iterated problem reformulation based EA for large-scale multiobjective optimization, where the problem reformulation based method and the decomposition based method are used iteratively to address the aforementioned issues. The proposed method is compared with several state-of-the-art EAs on a variety of large-scale multiobjective optimization problems. Experimental results demonstrate the effectiveness of our proposed iterated method in large-scale multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
He, Cheng; Cheng, Ran; Zhang, Chuanji; Tian, Ye; Chen, Qin; Yao, Xin
Evolutionary Large-Scale Multiobjective Optimization for Ratio Error Estimation of Voltage Transformers Journal Article
In: IEEE Transactions on Evolutionary Computation, vol. 24, no. 5, pp. 868–881, 2020, ISSN: 1941-0026.
@article{he_evolutionary_2020,
title = {Evolutionary Large-Scale Multiobjective Optimization for Ratio Error Estimation of Voltage Transformers},
author = {Cheng He and Ran Cheng and Chuanji Zhang and Ye Tian and Qin Chen and Xin Yao},
doi = {10.1109/TEVC.2020.2967501},
issn = {1941-0026},
year = {2020},
date = {2020-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
volume = {24},
number = {5},
pages = {868--881},
abstract = {Ratio error (RE) estimation of the voltage transformers (VTs) plays an important role in modern power delivery systems. Existing RE estimation methods mainly focus on periodical calibration but ignore the time-varying property. Consequently, it is difficult to efficiently estimate the state of the VTs in real time. To address this issue, we formulate a time-varying RE estimation (TREE) problem into a large-scale multiobjective optimization problem, where the multiple objectives and inequality constraints are formulated by statistical and physical rules extracted from the power delivery systems. Furthermore, a set of TREE problems from different substations is systematically formulated into a benchmark test suite for characterizing their different properties. The formulation of these TREE problems not only transfers an expensive RE estimation task to a relatively cheaper optimization problem but also promotes the research in large-scale multiobjective optimization by providing a real-world benchmark test suite with complex variable interactions and correlations to different objectives. To the best of our knowledge, this is the first time to formulate a real-world problem into a benchmark test suite for large-scale multiobjective optimization, and it is also the first work proposing to solve TREE problems via evolutionary multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hou, Zhanglu; He, Cheng; Cheng, Ran
Reformulating Preferences into Constraints for Evolutionary Multi- and Many-objective Optimization Journal Article
In: Information Sciences, vol. 541, pp. 1–15, 2020, ISSN: 0020-0255.
@article{hou_reformulating_2020,
title = {Reformulating Preferences into Constraints for Evolutionary Multi- and Many-objective Optimization},
author = {Zhanglu Hou and Cheng He and Ran Cheng},
url = {https://www.sciencedirect.com/science/article/pii/S0020025520305223},
doi = {10.1016/j.ins.2020.05.103},
issn = {0020-0255},
year = {2020},
date = {2020-01-01},
urldate = {2021-08-01},
journal = {Information Sciences},
volume = {541},
pages = {1--15},
abstract = {Despite that the reference point based preference articulation plays a vital role in evolutionary multi- and many-objective optimization, three issues remain challenging. First, the performance of reference point based preference articulation largely depends on the location of the reference point. Second, the parameter settings for controlling the region of interest are not robust to the Pareto optimal fronts with different complicated shapes. Third, most existing methods have poor scalability to the number of objectives. To meet these challenges, we propose to reformulate preferences into constraints for evolutionary multi- and many-objective optimization. Extensive experiments on a variety of benchmark problems are conducted to demonstrate the effectiveness of our proposed method.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kong, Yanguo; Kong, Xiangyi; He, Cheng; Liu, Changsong; Wang, Liting; Su, Lijuan; Gao, Jun; Guo, Qi; Cheng, Ran
Constructing an Automatic Diagnosis and Severity-classification Model for Acromegaly Using Facial Photographs by Deep Learning Journal Article
In: Journal of Hematology & Oncology, vol. 13, no. 1, pp. 88, 2020, ISSN: 1756-8722.
@article{kong_constructing_2020,
title = {Constructing an Automatic Diagnosis and Severity-classification Model for Acromegaly Using Facial Photographs by Deep Learning},
author = {Yanguo Kong and Xiangyi Kong and Cheng He and Changsong Liu and Liting Wang and Lijuan Su and Jun Gao and Qi Guo and Ran Cheng},
url = {https://doi.org/10.1186/s13045-020-00925-y},
doi = {10.1186/s13045-020-00925-y},
issn = {1756-8722},
year = {2020},
date = {2020-01-01},
urldate = {2021-08-26},
journal = {Journal of Hematology & Oncology},
volume = {13},
number = {1},
pages = {88},
abstract = {Due to acromegaly’s insidious onset and slow progression, its diagnosis is usually delayed, thus causing severe complications and treatment difficulty. A convenient screening method is imperative. Based on our previous work, we herein developed a new automatic diagnosis and severity-classification model for acromegaly using facial photographs by deep learning on the data of 2148 photographs at different severity levels. Each photograph was given a score reflecting its severity (range 1~3). Our developed model achieved a prediction accuracy of 90.7% on the internal test dataset and outperformed the performance of ten junior internal medicine physicians (89.0%). The prospect of applying this model to real clinical practices is promising due to its potential health economic benefits.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Cheng, Ran; Omidvar, Mohammad Nabi; Gandomi, Amir H; Sendhoff, Bernhard; Menzel, Stefan; Yao, Xin
Solving Incremental Optimization Problems via Cooperative Coevolution Journal Article
In: IEEE Transactions on Evolutionary Computation, vol. 23, no. 5, pp. 762–775, 2019, ISSN: 1089-778X.
@article{cheng_solving_2019,
title = {Solving Incremental Optimization Problems via Cooperative Coevolution},
author = {Ran Cheng and Mohammad Nabi Omidvar and Amir H Gandomi and Bernhard Sendhoff and Stefan Menzel and Xin Yao},
doi = {10.1109/TEVC.2018.2883599},
issn = {1089-778X},
year = {2019},
date = {2019-10-01},
journal = {IEEE Transactions on Evolutionary Computation},
volume = {23},
number = {5},
pages = {762--775},
abstract = {Engineering designs can involve multiple stages, where at each stage, the design models are incrementally modified and optimized. In contrast to traditional dynamic optimization problems, where the changes are caused by some objective factors, the changes in such incremental optimization problems (IOPs) are usually caused by the modifications made by the decision makers during the design process. While existing work in the literature is mainly focused on traditional dynamic optimization, little research has been dedicated to solving such IOPs. In this paper, we study how to adopt cooperative coevolution to efficiently solve a specific type of IOPs, namely, those with increasing decision variables. First, we present a benchmark function generator on the basis of some basic formulations of IOPs with increasing decision variables and exploitable modular structure. Then, we propose a contribution-based cooperative coevolutionary framework coupled with an incremental grouping method for dealing with them. On one hand, the benchmark function generator is capable of generating various benchmark functions with various characteristics. On the other hand, the proposed framework is promising in solving such problems in terms of both optimization accuracy and computational efficiency. In addition, the proposed method is further assessed using a real-world application, i.e., the design optimization of a stepped cantilever beam.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
He, Cheng; Cheng, Ran; Jin, Yaochu; Yao, Xin
Surrogate-Assisted Expensive Many-Objective Optimization by Model Fusion Inproceedings
In: 2019 IEEE Congress on Evolutionary Computation (CEC), pp. 1672–1679, 2019, ISBN: 978-1-7281-2153-6.
@inproceedings{he_surrogate-assisted_2019,
title = {Surrogate-Assisted Expensive Many-Objective Optimization by Model Fusion},
author = {Cheng He and Ran Cheng and Yaochu Jin and Xin Yao},
url = {https://ieeexplore.ieee.org/abstract/document/8790155},
doi = {10.1109/CEC.2019.8790155},
isbn = {978-1-7281-2153-6},
year = {2019},
date = {2019-08-08},
booktitle = {2019 IEEE Congress on Evolutionary Computation (CEC)},
pages = {1672--1679},
abstract = {Surrogate-assisted evolutionary algorithms have played an important role in expensive optimization where a small number of real-objective function evaluations are allowed. Usually, the surrogate models are used for the same purpose, e.g., to approximate the real-objective function or the aggregation fitness function. However, there is little work on surrogate-assisted optimization by model fusion, i.e., different surrogate models are fused for different purposes to improve the performance of the algorithm. In this work, we propose a surrogate-assisted approach by model fusion for solving expensive many-objective optimization problems, in which the Kriging assisted objective function approximation method is fused with the classifier assisted approach. The proposed algorithm is compared with some state-of-the-art surrogate-assisted algorithms on DTLZ problems and a real-world problem, and some encouraging results have been achieved by our proposed model fusion based approach.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
He, Cheng; Li, Lianghao; Tian, Ye; Zhang, Xingyi; Cheng, Ran; Jin, Yaochu; Yao, Xin
Accelerating Large-Scale Multiobjective Optimization via Problem Reformulation Journal Article
In: IEEE Transactions on Evolutionary Computation, vol. 23, no. 6, pp. 949–961, 2019, ISSN: 1941-0026.
@article{he_accelerating_2019,
title = {Accelerating Large-Scale Multiobjective Optimization via Problem Reformulation},
author = {Cheng He and Lianghao Li and Ye Tian and Xingyi Zhang and Ran Cheng and Yaochu Jin and Xin Yao},
url = {https://ieeexplore.ieee.org/abstract/document/8628263},
doi = {10.1109/TEVC.2019.2896002},
issn = {1941-0026},
year = {2019},
date = {2019-01-27},
journal = {IEEE Transactions on Evolutionary Computation},
volume = {23},
number = {6},
pages = {949--961},
abstract = {In this paper, we propose a framework to accelerate the computational efficiency of evolutionary algorithms on large-scale multiobjective optimization. The main idea is to track the Pareto optimal set (PS) directly via problem reformulation. To begin with, the algorithm obtains a set of reference directions in the decision space and associates them with a set of weight variables for locating the PS. Afterwards, the original large-scale multiobjective optimization problem is reformulated into a low-dimensional single-objective optimization problem. In the reformulated problem, the decision space is reconstructed by the weight variables and the objective space is reduced by an indicator function. Thanks to the low dimensionality of the weight variables and reduced objective space, a set of quasi-optimal solutions can be obtained efficiently. Finally, a multiobjective evolutionary algorithm is used to spread the quasi-optimal solutions over the approximate Pareto optimal front evenly. Experiments have been conducted on a variety of large-scale multiobjective problems with up to 5000 decision variables. Four different types of representative algorithms are embedded into the proposed framework and compared with their original versions, respectively. Furthermore, the proposed framework has been compared with two state-of-the-art algorithms for large-scale multiobjective optimization. The experimental results have demonstrated the significant improvement benefited from the framework in terms of its performance and computational efficiency in large-scale multiobjective optimization.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Cheng, Ran; He, Cheng; Jin, Yaochu; Yao, Xin
Model-based Evolutionary Algorithms: a Short Survey Journal Article
In: Complex & Intelligent Systems, vol. 4, no. 4, pp. 283–292, 2018, ISSN: 2198-6053.
@article{cheng_model-based_2018,
title = {Model-based Evolutionary Algorithms: a Short Survey},
author = {Ran Cheng and Cheng He and Yaochu Jin and Xin Yao},
url = {https://doi.org/10.1007/s40747-018-0080-1},
doi = {10.1007/s40747-018-0080-1},
issn = {2198-6053},
year = {2018},
date = {2018-08-07},
urldate = {2021-08-01},
journal = {Complex & Intelligent Systems},
volume = {4},
number = {4},
pages = {283--292},
abstract = {The evolutionary algorithms (EAs) are a family of nature-inspired algorithms widely used for solving complex optimization problems. Since the operators (e.g. crossover, mutation, selection) in most traditional EAs are developed on the basis of fixed heuristic rules or strategies, they are unable to learn the structures or properties of the problems to be optimized. To equip the EAs with learning abilities, recently, various model-based evolutionary algorithms (MBEAs) have been proposed. This survey briefly reviews some representative MBEAs by considering three different motivations of using models. First, the most commonly seen motivation of using models is to estimate the distribution of the candidate solutions. Second, in evolutionary multi-objective optimization, one motivation of using models is to build the inverse models from the objective space to the decision space. Third, when solving computationally expensive problems, models can be used as surrogates of the fitness functions. Based on the review, some further discussions are also given.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}