@book{Numerical-Optimization-2006,
  author    = {Nocedal, Jorge and Wright, Stephen J.},
  title     = {Numerical Optimization},
  edition   = {Second},
  publisher = {Springer},
  address   = {New York, NY, USA},
  year      = {2006},
}
@article{convergence_lbfgs,
  author  = {Liu, Dong C. and Nocedal, Jorge},
  title   = {On the Limited Memory {BFGS} Method for Large Scale Optimization},
  journal = {Mathematical Programming},
  volume  = {45},
  number  = {1--3},
  pages   = {503--528},
  month   = aug,
  year    = {1989},
  doi     = {10.1007/bf01589116},
}
@article{BenchmarkTools,
  author        = {Chen, Jiahao and Revels, Jarrett},
  title         = {Robust Benchmarking in Noisy Environments},
  journal       = {arXiv e-prints},
  eid           = {arXiv:1608.04295},
  archivePrefix = {arXiv},
  eprint        = {1608.04295},
  primaryClass  = {cs.PF},
  keywords      = {Computer Science - Performance, 68N30, B.8.1, D.2.5},
  month         = aug,
  year          = {2016},
  adsurl        = {https://ui.adsabs.harvard.edu/abs/2016arXiv160804295C},
  adsnote       = {Provided by the SAO/NASA Astrophysics Data System},
}
@inproceedings{Dogleg,
  author    = {Ampazis, N. and Spirou, S. and Perantonis, S.},
  title     = {Training Feedforward Neural Networks with the {Dogleg} Method and {BFGS} {Hessian} Updates},
  booktitle = {IEEE-INNS-ENNS International Joint Conference on Neural Networks},
  volume    = {2},
  pages     = {1138},
  issn      = {1098-7576},
  publisher = {IEEE Computer Society},
  address   = {Los Alamitos, CA, USA},
  month     = jul,
  year      = {2000},
  doi       = {10.1109/IJCNN.2000.857827},
  url       = {https://doi.ieeecomputersociety.org/10.1109/IJCNN.2000.857827},
  abstract  = {In this paper, we introduce an advanced optimization algorithm for training feedforward neural networks. The algorithm combines the BFGS Hessian update formula with a special case of trust region techniques, called the Dogleg method, as an altenative technique to line search methods. Simulations regarding classification and function approximation problems are presented which reveal a clear improvement both in convergence and success rates over standard BFGS implementations.},
}