@article{byrdLimitedMemoryAlgorithm1995,
title = {A {{Limited Memory Algorithm}} for {{Bound Constrained Optimization}}},
author = {Byrd, Richard H. and Lu, Peihuang and Nocedal, Jorge and Zhu, Ciyou},
year = {1995},
month = sep,
journal = {SIAM Journal on Scientific Computing},
volume = {16},
number = {5},
pages = {1190--1208},
publisher = {{Society for Industrial and Applied Mathematics}},
issn = {1064-8275},
doi = {10.1137/0916069},
abstract = {An algorithm for solving large nonlinear optimization problems with simple bounds is described. It is based on the gradient projection method and uses a limited memory BFGS matrix to approximate the Hessian of the objective function. It is shown how to take advantage of the form of the limited memory approximation to implement the algorithm efficiently. The results of numerical tests on a set of large problems are reported.},
keywords = {49,65,bound constrained optimization,large-scale optimization,limited memory method,nonlinear optimization,quasi-Newton method},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/3RC267TJ/Byrd et al. - 1995 - A Limited Memory Algorithm for Bound Constrained O.pdf}
}
% == BibTeX quality report for byrdLimitedMemoryAlgorithm1995:
% ? Title looks like it was stored in title-case in Zotero
% ? unused Journal abbreviation ("SIAM J. Sci. Comput.")
% ? unused Library catalog ("epubs.siam.org (Atypon)")
% ? unused Url ("https://epubs.siam.org/doi/10.1137/0916069")
@article{byrdRepresentationsQuasiNewtonMatrices1994,
title = {Representations of Quasi-{{Newton}} Matrices and Their Use in Limited Memory Methods},
author = {Byrd, Richard H. and Nocedal, Jorge and Schnabel, Robert B.},
year = {1994},
journal = {Mathematical Programming},
volume = {63},
  number = {1--3},
pages = {129--156},
issn = {0025-5610},
doi = {10.1007/BF01582063},
abstract = {We derive compact representations of BFGS and symmetric rank-one matrices for optimization. These representations allow us to efficiently implement limited memory methods for large constrained optimization problems. In particular, we discuss how to compute projections of limited memory matrices onto subspaces. We also present a compact representation of the matrices generated by Broyden's update for solving systems of nonlinear equations.},
keywords = {constrained optimization,large-scale optimization,limited memory method,Quasi-Newton method},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/RF6XK7MX/Byrd et al. - 1992 - Representations of quasi-Newton matrices and their.pdf}
}
% == BibTeX quality report for byrdRepresentationsQuasiNewtonMatrices1994:
% ? unused Library catalog ("Northwestern Scholars")
% ? unused Url ("http://www.scopus.com/inward/record.url?scp=0028319529&partnerID=8YFLogxK")
@article{byrdStochasticQuasiNewtonMethod2016,
title = {A {{Stochastic Quasi-Newton Method}} for {{Large-Scale Optimization}}},
author = {Byrd, R. H. and Hansen, S. L. and Nocedal, Jorge and Singer, Y.},
year = {2016},
month = jan,
journal = {SIAM Journal on Optimization},
volume = {26},
number = {2},
pages = {1008--1031},
publisher = {{Society for Industrial and Applied Mathematics}},
issn = {1052-6234},
doi = {10.1137/140954362},
abstract = {This paper describes how to incorporate sampled curvature information in a Newton-CG method and in a limited memory quasi-Newton method for statistical learning. The motivation for this work stems from supervised machine learning applications involving a very large number of training points. We follow a batch approach, also known in the stochastic optimization literature as a sample average approximation approach. Curvature information is incorporated in two subsampled Hessian algorithms, one based on a matrix-free inexact Newton iteration and one on a preconditioned limited memory BFGS iteration. A crucial feature of our technique is that Hessian-vector multiplications are carried out with a significantly smaller sample size than is used for the function and gradient. The efficiency of the proposed methods is illustrated using a machine learning application involving speech recognition.},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/P5KU7UHZ/Byrd et al. - 2016 - A Stochastic Quasi-Newton Method for Large-Scale O.pdf}
}
% == BibTeX quality report for byrdStochasticQuasiNewtonMethod2016:
% ? Title looks like it was stored in title-case in Zotero
% ? unused Journal abbreviation ("SIAM J. Optim.")
% ? unused Library catalog ("epubs.siam.org (Atypon)")
% ? unused Url ("https://epubs.siam.org/doi/10.1137/140954362")
@misc{COLLET_LBFGSB_2024,
title = {Lbfgsb - {{A}} Pure Python Implementation.},
author = {Collet, Antoine},
year = {2024},
month = may,
doi = {10.5281/zenodo.11384588},
  abstract = {A python implementation of the famous L-BFGS-B quasi-Newton solver [1]. This code is a python port of the reference implementation of Limited-memory Broyden-Fletcher-Goldfarb-Shanno with bounds (L-BFGS-B), algorithm 778 written in Fortran [2,3] (last update in 2011). Note that this is not a wrapper such as `minimize` in scipy but a complete reimplementation (pure python). The original Fortran code can be found here: https://dl.acm.org/doi/10.1145/279232.279236. The aim of this reimplementation was threefold. First, familiarize ourselves with the code, its logic and inner optimizations. Second, gain access to certain parameters that are hard-coded in the Fortran code and cannot be modified (typically the Wolfe condition parameters for the line search). Third, implement additional functionalities that require significant modification of the code core. References ---------- [1] R. H. Byrd, P. Lu and J. Nocedal. A Limited Memory Algorithm for Bound Constrained Optimization, (1995), SIAM Journal on Scientific and Statistical Computing, 16, 5, pp. 1190-1208. [2] C. Zhu, R. H. Byrd and J. Nocedal. Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (1997), ACM Transactions on Mathematical Software, 23, 4, pp. 550-560. [3] J. L. Morales and J. Nocedal. Remark on Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (2011), ACM Transactions on Mathematical Software, 38, 1.},
howpublished = {Zenodo},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/9JLD5JEX/11384588.html}
}
% == BibTeX quality report for COLLET_LBFGSB_2024:
% ? unused Url ("https://zenodo.org/records/11384588")
@mastersthesis{henaoLBFGSBNSOptimizerNonSmooth2014,
title = {An {{L-BFGS-B-NS Optimizer}} for {{Non-Smooth Functions}}},
author = {Henao, Wilmer},
year = {2014},
month = may,
address = {{New York}},
langid = {english},
school = {New York University},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/W4DBJVTB/Henao - An L-BFGS-B-NS Optimizer for Non-Smooth Functions.pdf}
}
% == BibTeX quality report for henaoLBFGSBNSOptimizerNonSmooth2014:
% ? Title looks like it was stored in title-case in Zotero
% ? unused Library catalog ("Zotero")
% ? unused Url ("https://cs.nyu.edu/~overton/mstheses/henao/msthesis.pdf")
@article{liuLimitedMemoryBFGS1989,
title = {On the Limited Memory {{BFGS}} Method for Large Scale Optimization},
author = {Liu, Dong C. and Nocedal, Jorge},
year = {1989},
month = aug,
journal = {Mathematical Programming},
volume = {45},
number = {1},
pages = {503--528},
issn = {1436-4646},
doi = {10.1007/BF01589116},
abstract = {We study the numerical performance of a limited memory quasi-Newton method for large scale optimization, which we call the L-BFGS method. We compare its performance with that of the method developed by Buckley and LeNir (1985), which combines cycles of BFGS steps and conjugate direction steps. Our numerical tests indicate that the L-BFGS method is faster than the method of Buckley and LeNir, and is better able to use additional storage to accelerate convergence. We show that the L-BFGS method can be greatly accelerated by means of a simple scaling. We then compare the L-BFGS method with the partitioned quasi-Newton method of Griewank and Toint (1982a). The results show that, for some problems, the partitioned quasi-Newton method is clearly superior to the L-BFGS method. However we find that for other problems the L-BFGS method is very competitive due to its low iteration cost. We also study the convergence properties of the L-BFGS method, and prove global convergence on uniformly convex problems.},
langid = {english},
keywords = {conjugate gradient method,Large scale nonlinear optimization,limited memory methods,partitioned quasi-Newton method},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/9ZGP72WP/Liu and Nocedal - 1989 - On the limited memory BFGS method for large scale .pdf}
}
% == BibTeX quality report for liuLimitedMemoryBFGS1989:
% ? unused Library catalog ("Springer Link")
% ? unused Url ("https://doi.org/10.1007/BF01589116")
@article{moralesRemarkAlgorithm7782011,
title = {Remark on ``Algorithm 778: {{L-BFGS-B}}: {{Fortran}} Subroutines for Large-Scale Bound Constrained Optimization''},
  shorttitle = {Remark on ``Algorithm 778''},
author = {Morales, Jos{\'e} Luis and Nocedal, Jorge},
year = {2011},
month = dec,
journal = {ACM Transactions on Mathematical Software},
volume = {38},
number = {1},
pages = {7:1--7:4},
issn = {0098-3500},
doi = {10.1145/2049662.2049669},
abstract = {This remark describes an improvement and a correction to Algorithm 778. It is shown that the performance of the algorithm can be improved significantly by making a relatively simple modification to the subspace minimization phase. The correction concerns an error caused by the use of routine dpmeps to estimate machine precision.},
keywords = {constrained optimization,infeasibility,Nonlinear programming},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/8PLYH2LK/Morales and Nocedal - 2011 - Remark on “algorithm 778 L-BFGS-B Fortran subrou.pdf}
}
% == BibTeX quality report for moralesRemarkAlgorithm7782011:
% ? unused Journal abbreviation ("ACM Trans. Math. Softw.")
% ? unused Library catalog ("ACM Digital Library")
% ? unused Url ("https://doi.org/10.1145/2049662.2049669")
@book{nocedalNumericalOptimization1999,
title = {Numerical {{Optimization}}},
  author = {Nocedal, Jorge and Wright, Stephen J.},
year = {1999},
series = {Springer {{Series}} in {{Operations Research}} and {{Financial Engineering}}},
publisher = {{Springer-Verlag}},
address = {{New York}},
doi = {10.1007/b98874},
isbn = {978-0-387-98793-4},
langid = {english},
keywords = {algorithms,linear optimization,nonlinear optimization,optimization,quadratic programming,Quasi-Newton method},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/9ZY7E29E/Nocedal and Wright - 1999 - Numerical Optimization.pdf}
}
% == BibTeX quality report for nocedalNumericalOptimization1999:
% ? Title looks like it was stored in title-case in Zotero
% ? unused Library catalog ("DOI.org (Crossref)")
% ? unused Url ("http://link.springer.com/10.1007/b98874")
@article{nocedalUpdatingQuasiNewtonMatrices1980,
title = {Updating Quasi-{{Newton}} Matrices with Limited Storage},
author = {Nocedal, Jorge},
year = {1980},
journal = {Mathematics of Computation},
volume = {35},
number = {151},
pages = {773--782},
issn = {0025-5718, 1088-6842},
doi = {10.1090/S0025-5718-1980-0572855-7},
langid = {english},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/J5STB399/Nocedal - 1980 - Updating quasi-Newton matrices with limited storag.pdf}
}
% == BibTeX quality report for nocedalUpdatingQuasiNewtonMatrices1980:
% ? unused Journal abbreviation ("Math. Comp.")
% ? unused Library catalog ("www.ams.org")
% ? unused Url ("https://www.ams.org/mcom/1980-35-151/S0025-5718-1980-0572855-7/")
@misc{QuestionsGradientScaling,
title = {Questions for Gradient Scaling and Adjoint Source in Inverse Design},
  howpublished = {Ansys Learning Forum | Ansys Innovation Space},
  abstract = {Hi! I'm new to inverse design and I have some questions when using the Photonic Inverse Design Python API; I would greatly appreciate it if I could receive your answers and clarification! First question is how to set up an adjoint light source in FDTD? According to the literature cited on the official website (Photonic Inverse Design Overview [\ldots ]},
langid = {american},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/89N43GFM/questions-for-gradient-scaling-and-adjoint-source-in-inverse-design.html}
}
% == BibTeX quality report for QuestionsGradientScaling:
% ? unused Url ("https://innovationspace.ansys.com/forum/forums/topic/questions-for-gradient-scaling-and-adjoint-source-in-inverse-design/")
@article{zhuAlgorithm778LBFGSB1997,
title = {Algorithm 778: {{L-BFGS-B}}: {{Fortran}} Subroutines for Large-Scale Bound-Constrained Optimization},
shorttitle = {Algorithm 778},
author = {Zhu, Ciyou and Byrd, Richard H. and Lu, Peihuang and Nocedal, Jorge},
year = {1997},
month = dec,
journal = {ACM Transactions on Mathematical Software},
volume = {23},
number = {4},
pages = {550--560},
issn = {0098-3500},
doi = {10.1145/279232.279236},
  abstract = {L-BFGS-B is a limited-memory algorithm for solving large nonlinear optimization problems subject to simple bounds on the variables. It is intended for problems in which information on the Hessian matrix is difficult to obtain, or for large dense problems. L-BFGS-B can also be used for unconstrained problems and in this case performs similarly to its predecessor, algorithm L-BFGS (Harwell routine VA15). The algorithm is implemented in Fortran 77.},
keywords = {large-scale optimization,limited-memory method,nonlinear optimization,variable metric method},
file = {/home/acollet/Documents/BIBLIOGRAPHY/storage/KUJH2WBQ/Zhu et al. - 1997 - Algorithm 778 L-BFGS-B Fortran subroutines for l.pdf}
}
% == BibTeX quality report for zhuAlgorithm778LBFGSB1997:
% ? unused Journal abbreviation ("ACM Trans. Math. Softw.")
% ? unused Library catalog ("Dec. 1997")
% ? unused Url ("https://doi.org/10.1145/279232.279236")