Coverage for HARK / estimation.py: 95%
252 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-08 05:31 +0000
1"""Functions for estimating structural models, including optimization methods
2and bootstrapping tools.
3"""
5import csv
6import multiprocessing
7import warnings
8from copy import deepcopy # For replicating complex objects
9from time import time # Used to time execution
11import estimagic as em
12import numpy as np # Numerical Python
13from joblib import Parallel, delayed
14from scipy.optimize import fmin, fmin_powell # off-the-shelf minimizers
16from HARK.core import AgentType
18__all__ = [
19 "minimize_nelder_mead",
20 "minimize_powell",
21 "bootstrap_sample_from_data",
22 "parallelNelderMead",
23]
def minimize_nelder_mead(
    objective_func,
    parameter_guess,
    verbose=False,
    which_vars=None,
    **kwargs,
):
    """Minimizes the objective function using the Nelder-Mead simplex algorithm,
    starting from an initial parameter guess.

    Parameters
    ----------
    objective_func : function
        The function to be minimized. It should take only a single argument, which
        should be a list representing the parameters to be estimated.
    parameter_guess : [float]
        A starting point for the Nelder-Mead algorithm, which must be a valid
        input for objective_func.
    which_vars : np.array or None
        Array of booleans indicating which parameters should be estimated. When
        not provided, estimation is performed on all parameters.
    verbose : boolean
        A flag for the amount of output to print.

    Returns
    -------
    xopt : np.array
        The values that minimize objective_func.

    """
    if which_vars is None:
        which_vars = np.ones(len(parameter_guess), dtype=bool)

    # Work on a float copy of the guess: an integer-typed guess would otherwise
    # silently truncate the optimizer's float trial parameters on assignment.
    parameter_guess_arr = np.array(parameter_guess, dtype=float)

    # Temporary "modified objective function" that restricts the parameters
    # being estimated, holding the de-selected ones fixed at their guess values.
    def objective_func_mod(params):
        params_full = np.copy(parameter_guess_arr)
        params_full[which_vars] = params
        return objective_func(params_full)

    # Slice the guess down to only the parameters being optimized
    parameter_guess_mod = parameter_guess_arr[which_vars]

    # Execute the minimization, starting from the given parameter guess
    t0 = time()  # Time the process
    OUTPUT = fmin(
        objective_func_mod,
        parameter_guess_mod,
        full_output=1,
        disp=verbose,
        **kwargs,
    )
    t1 = time()

    # Extract values from optimization output:
    xopt = OUTPUT[0]  # Parameters that minimize function.
    fopt = OUTPUT[1]  # Value of function at minimum: ``fopt = func(xopt)``.
    optiter = OUTPUT[2]  # Number of iterations performed.
    funcalls = OUTPUT[3]  # Number of function calls made.
    warnflag = OUTPUT[4]  # warnflag : int
    # 1 : Maximum number of function evaluations made.
    # 2 : Maximum number of iterations reached.
    # Check that optimization succeeded:
    if warnflag != 0:
        warnings.warn(
            "Minimization failed! xopt="
            + str(xopt)
            + ", fopt="
            + str(fopt)
            + ", optiter="
            + str(optiter)
            + ", funcalls="
            + str(funcalls)
            + ", warnflag="
            + str(warnflag),
        )

    # Re-embed the optimized subset into the full parameter vector
    xopt_full = np.copy(parameter_guess_arr)
    xopt_full[which_vars] = xopt

    # Display and return the results:
    if verbose:
        print("Time to estimate is " + str(t1 - t0) + " seconds.")
    return xopt_full
def minimize_powell(objective_func, parameter_guess, verbose=False):
    """Minimize objective_func with the derivative-free Powell method, starting
    from parameter_guess.

    Parameters
    ----------
    objective_func : function
        The function to be minimized. It should take only a single argument, which
        should be a list representing the parameters to be estimated.
    parameter_guess : [float]
        A starting point for the Powell algorithm, which must be a valid
        input for objective_func.
    verbose : boolean
        A flag for the amount of output to print.

    Returns
    -------
    xopt : [float]
        The values that minimize objective_func.

    """
    # Run the minimization from the supplied starting point, timing it
    start = time()
    xopt, fopt, direc, optiter, funcalls, warnflag = fmin_powell(
        objective_func,
        parameter_guess,
        full_output=1,
        maxiter=1000,
        disp=verbose,
    )
    elapsed = time() - start

    # warnflag: 1 = maximum function evaluations made; 2 = maximum iterations reached
    if warnflag != 0:
        warnings.warn(
            f"Minimization failed! xopt={xopt}"
            f", fopt={fopt}"
            f", direc={direc}"
            f", optiter={optiter}"
            f", funcalls={funcalls}"
            f", warnflag={warnflag}",
        )

    # Report timing if requested, then hand back the minimizer
    if verbose:
        print(f"Time to estimate is {elapsed} seconds.")
    return xopt
def bootstrap_sample_from_data(data, weights=None, seed=0):
    """Samples rows from the input array of data, generating a new data array with
    an equal number of rows (records). Rows are drawn with equal probability
    by default, but probabilities can be specified with weights (must sum to 1).

    Parameters
    ----------
    data : np.array
        An array of data, with each row representing a record.
    weights : np.array
        A weighting array with length equal to data.shape[0].
    seed : int
        A seed for the random number generator.

    Returns
    -------
    new_data : np.array
        A resampled version of input data.

    """
    # Set up the random number generator
    RNG = np.random.default_rng(seed)
    N = data.shape[0]

    # Draw random row indices
    if weights is not None:
        # Invert the weight CDF: row i is drawn when u lands in its cumsum bin.
        cutoffs = np.cumsum(weights)
        indices = np.searchsorted(cutoffs, RNG.uniform(size=N))
        # Guard against float round-off making cumsum top out slightly below 1
        indices = np.minimum(indices, N - 1)
    else:
        # Exactly uniform draws; the old linspace-based cutoffs gave row 0
        # essentially zero probability of being sampled.
        indices = RNG.integers(0, N, size=N)

    # Create a bootstrapped sample
    new_data = deepcopy(data[indices,])
    return new_data
214def _initialize_nelder_mead(guess, perturb):
215 """Construct the initial simplex and related bookkeeping when starting fresh.
217 Parameters
218 ----------
219 guess : np.array
220 Initial starting point for the simplex.
221 perturb : np.array or None
222 Perturbation vector for the simplex. If None, each parameter is
223 perturbed by 10% of its value (or by 0.1 when the value is zero).
225 Returns
226 -------
227 simplex : np.array
228 The initial simplex of shape (N, K).
229 fvals : np.array
230 Array of NaN values of length N, to be filled by evaluation.
231 dim_count : int
232 Number of parameters being optimized.
233 N : int
234 Number of points in the simplex (dim_count + 1).
235 K : int
236 Total number of parameters in guess.
237 iters : int
238 Initial iteration counter (0).
239 evals : int
240 Initial evaluation counter (0).
242 """
243 if perturb is None: # Default: perturb each parameter by 10%
244 perturb = 0.1 * guess
245 perturb[guess == 0] = 0.1
247 params_to_opt = np.where(perturb != 0)[0] # Indices of which parameters to optimize
248 dim_count = params_to_opt.size # Number of parameters to search over
249 N = dim_count + 1 # Number of points in simplex
250 K = guess.size # Total number of parameters
251 simplex = np.tile(guess, (N, 1))
252 for j in range(
253 dim_count,
254 ): # Perturb each parameter to optimize by the specified distance
255 simplex[j + 1, params_to_opt[j]] = (
256 simplex[j + 1, params_to_opt[j]] + perturb[params_to_opt[j]]
257 )
259 # Initialize iteration and evaluation counts, plus a 1D array of function values
260 fvals = np.zeros(dim_count + 1) + np.nan
262 iters = 0
263 evals = 0
265 return simplex, fvals, dim_count, N, K, iters, evals
268def _evaluate_and_sort_initial_simplex(
269 obj_func, simplex, fvals, N, parallel, use_parallel, verbose, savefreq, name
270):
271 """Evaluate the objective function at each vertex of the initial simplex and
272 sort the simplex by function value from best to worst.
274 Parameters
275 ----------
276 obj_func : function
277 The objective function to be minimized.
278 simplex : np.array
279 The initial simplex of shape (N, K).
280 fvals : np.array
281 Placeholder array of function values (filled here).
282 N : int
283 Number of points in the simplex.
284 parallel : joblib.Parallel
285 Configured Parallel object for parallel evaluation.
286 use_parallel : bool
287 Whether to use parallel evaluation.
288 verbose : int
289 Verbosity level; messages are printed when verbose > 0.
290 savefreq : int or None
291 Save frequency in iterations; saves immediately after initialization
292 when not None.
293 name : str or None
294 Filename stem for saving progress.
296 Returns
297 -------
298 simplex : np.array
299 The sorted simplex (best vertex first).
300 fvals : np.array
301 Function values at each vertex, sorted to match simplex.
303 """
304 temp_simplex = list(simplex) # Evaluate the initial simplex
305 if use_parallel:
306 fvals = np.array(parallel(delayed(obj_func)(params) for params in temp_simplex))
307 else:
308 fvals = np.array([obj_func(params) for params in temp_simplex])
310 # Reorder the initial simplex
311 order = np.argsort(fvals)
312 fvals = fvals[order]
313 simplex = simplex[order, :]
314 fmin = fvals[0]
315 f_dist = np.abs(fmin - fvals[-1])
316 x_dist = np.max(
317 np.sqrt(np.sum((simplex - np.tile(simplex[0, :], (N, 1))) ** 2.0, axis=1)),
318 )
319 if verbose > 0:
320 print(
321 "Evaluated the initial simplex: fmin="
322 + str(fmin)
323 + ", f_dist="
324 + str(f_dist)
325 + ", x_dist="
326 + str(x_dist),
327 )
328 if savefreq is not None:
329 save_nelder_mead_data(name, simplex, fvals, 0, N)
330 if verbose > 0:
331 print("Saved search progress in " + name + ".txt")
333 return simplex, fvals
def _nelder_mead_iteration(
    obj_func,
    simplex,
    fvals,
    N,
    K,
    P,
    j_list,
    opt_params,
    s_param,
    parallel,
    use_parallel,
    verbose,
):
    """Perform one iteration of the parallel Nelder-Mead algorithm.

    Attempts to update the P worst vertices of the simplex. When every
    attempted update leaves the subsimplex unchanged, a shrink step is
    performed instead, re-evaluating all vertices except the best one.

    Parameters
    ----------
    obj_func : function
        The objective function to be minimized.
    simplex : np.array
        The current simplex of shape (N, K).
    fvals : np.array
        Function values at each vertex, sorted best to worst.
    N : int
        Number of points in the simplex.
    K : int
        Total number of parameters.
    P : int
        Degree of parallelization (number of worst vertices to update).
    j_list : range
        Indices of the vertices to attempt updating (range(N - P, N)).
    opt_params : list
        Three-element list [r_param, c_param, e_param] for reflection,
        contraction, and expansion.
    s_param : float
        Shrink parameter.
    parallel : joblib.Parallel
        Configured Parallel object for parallel evaluation.
    use_parallel : bool
        Whether to use parallel evaluation.
    verbose : int
        Verbosity level; messages are printed when verbose > 0.

    Returns
    -------
    simplex : np.array
        The updated and re-sorted simplex.
    fvals : np.array
        Updated function values sorted to match simplex.
    new_evals : int
        Number of objective function evaluations performed this iteration.

    """
    # Run one worker per vertex being updated (the P worst points)
    if use_parallel:
        results = parallel(
            delayed(parallel_nelder_mead_worker)(
                obj_func,
                simplex,
                fvals,
                j,
                P,
                opt_params,
            )
            for j in j_list
        )
    else:
        results = [
            parallel_nelder_mead_worker(obj_func, simplex, fvals, j, P, opt_params)
            for j in j_list
        ]

    # Gather each worker's proposed point, value, and evaluation count
    candidate_points = np.zeros((P, K)) + np.nan
    candidate_vals = np.zeros(P) + np.nan
    new_evals = 0
    for i, (point, val, count) in enumerate(results):
        candidate_points[i, :] = point
        candidate_vals[i] = val
        new_evals += count

    # Did any worker actually move its vertex?
    prior_subsimplex = simplex[(N - P) : N, :]
    if np.max(np.abs(candidate_points - prior_subsimplex)) == 0:
        if verbose > 0:
            print("Updated the simplex, but must perform a shrink step.")
        # No vertex moved: shrink every point toward the current best vertex
        # and re-evaluate all of them except the (unchanged) best one.
        simplex = s_param * np.tile(simplex[0, :], (N, 1)) + (1.0 - s_param) * simplex
        shrunken = list(simplex[1:N, :])
        if use_parallel:
            fvals = np.array(
                [fvals[0]]
                + parallel(delayed(obj_func)(params) for params in shrunken),
            )
        else:
            fvals = np.array([fvals[0]] + [obj_func(params) for params in shrunken])
        new_evals += N - 1
    else:
        if verbose > 0:
            print("Updated the simplex successfully.")
        # Accept the workers' proposals for the P worst vertices
        simplex[(N - P) : N, :] = candidate_points
        fvals[(N - P) : N] = candidate_vals

    # Reorder the simplex from best to worst
    order = np.argsort(fvals)
    return simplex[order, :], fvals[order], new_evals
453def _check_nm_termination(
454 iters, evals, fvals, simplex, N, maxiter, maxeval, ftol, xtol
455):
456 """Check whether any Nelder-Mead terminal condition has been satisfied.
458 Parameters
459 ----------
460 iters : int
461 Number of completed iterations.
462 evals : int
463 Cumulative number of function evaluations.
464 fvals : np.array
465 Current function values, sorted best to worst.
466 simplex : np.array
467 Current simplex of shape (N, K).
468 N : int
469 Number of points in the simplex.
470 maxiter : int
471 Maximum allowed iterations.
472 maxeval : int
473 Maximum allowed function evaluations.
474 ftol : float
475 Absolute function-value tolerance for convergence.
476 xtol : float
477 Absolute parameter-space tolerance for convergence.
479 Returns
480 -------
481 go : bool
482 False when at least one terminal condition is satisfied.
483 fmin : float
484 Best objective function value found so far.
485 f_dist : float
486 Absolute difference between the best and worst function values.
487 x_dist : float
488 Maximum Euclidean distance from the best vertex to any other vertex.
490 """
491 fmin = fvals[0]
492 f_dist = np.abs(fmin - fvals[-1])
493 x_dist = np.max(
494 np.sqrt(np.sum((simplex - np.tile(simplex[0, :], (N, 1))) ** 2.0, axis=1)),
495 )
497 go = True
498 if iters >= maxiter:
499 go = False
500 print("Maximum iterations reached, terminating unsuccessfully.")
501 if evals >= maxeval:
502 go = False
503 print("Maximum evaluations reached, terminating unsuccessfully.")
504 if f_dist < ftol:
505 go = False
506 print("Function tolerance reached, terminating successfully.")
507 if x_dist < xtol:
508 go = False
509 print("Parameter tolerance reached, terminating successfully.")
511 return go, fmin, f_dist, x_dist
def parallelNelderMead(
    obj_func,
    guess,
    perturb=None,
    P=1,
    ftol=1e-8,
    xtol=1e-8,
    maxiter=np.inf,
    maxeval=np.inf,
    r_param=1.0,
    e_param=1.0,
    c_param=0.5,
    s_param=0.5,
    maxthreads=None,
    name=None,
    resume=False,
    savefreq=None,
    verbose=1,
):
    """A parallel implementation of the Nelder-Mead minimization algorithm, as
    described in Lee and Wiswall. For long optimization procedures, it can
    save progress between iterations and resume later.

    Parameters
    ----------
    obj_func : function
        The objective function to be minimized. Takes a single 1D array as input.
    guess : np.array
        Initial starting point for the simplex, representing an input for obj_func.
    perturb : np.array
        Perturbation vector for the simplex, of the same length as an input to
        obj_func. If perturb[j] is non-zero, a simplex point will be created
        that perturbs the j-th element of guess by perturb[j]; if it is zero,
        then the j-th parameter of obj_func will not be optimized over. By
        default, perturb=None, indicating that all parameters should be optimized,
        with an initial perturbation of 0.1*guess.
    P : int
        Degree of parallelization: the number of vertices of the simplex to try
        to update on each iteration of the process.
    ftol : float
        Absolute tolerance of the objective function for convergence. If suc-
        cessive iterations return minimum function values that differ by less
        than ftol, the process terminates successfully.
    xtol : float
        Absolute tolerance of the input values for convergence. If the maximum
        distance between the current minimum point and the worst point in the
        simplex is less than xtol, then the process terminates successfully.
    maxiter : int
        Maximum number of Nelder-Mead iterations; reaching iters=maxiter is
        reported as an "unsuccessful" minimization.
    maxeval : int
        Maximum number of evaluations of obj_func (across all processes); reaching
        evals=maxeval is reported as an "unsuccessful" minimization.
    r_param: float
        Parameter indicating magnitude of the reflection point calculation.
    e_param: float
        Parameter indicating magnitude of the expansion point calculation.
    c_param: float
        Parameter indicating magnitude of the contraction point calculation.
    s_param: float
        Parameter indicating magnitude of the shrink calculation.
    maxthreads : int
        The maximum number of CPU cores that the optimization should use,
        regardless of the size of the problem.
    name : string
        A filename for (optionally) saving the progress of the Nelder-Mead search,
        and for resuming a previous search (when resume=True). Useful for long
        searches that could potentially be interrupted by computer down time.
    resume : boolean
        An indicator for whether the search should resume from earlier progress.
        When True, the process will load a progress file named in input name.
    savefreq : int
        When not None, search progress will be saved to name.txt every savefreq
        iterations, to be loaded later with resume=True).
    verbose : int
        Indicator for the verbosity of the optimization routine. Higher values
        generate more text output; verbose=0 produces no text output.

    Returns
    -------
    xopt : np.array
        The input that minimizes obj_func, as found by the minimization.

    """
    # Setup: resume from saved state or build the initial simplex from scratch
    if resume:
        simplex, fvals, iters, evals = load_nelder_mead_data(name)
        dim_count = fvals.size - 1
        N = dim_count + 1  # Number of points in simplex
        K = simplex.shape[1]  # Total number of parameters
    else:
        simplex, fvals, dim_count, N, K, iters, evals = _initialize_nelder_mead(
            guess, perturb
        )

    # Make sure degree of parallelization is not illegal. The largest legal
    # degree updates every vertex except the current best one, i.e. N - 1.
    if P > N - 1:
        warnings.warn(
            "Requested degree of simplex parallelization is "
            + str(P)
            + ", but dimension of optimization problem is only "
            + str(N - 1)
            + ".",
        )
        warnings.warn(
            "Degree of parallelization has been reduced to " + str(N - 1) + "."
        )
        P = N - 1  # Was N - 2, which contradicted the warning message above

    # Create the pool of worker processes
    cpu_cores = multiprocessing.cpu_count()  # Total number of available CPU cores
    cores_to_use = min(cpu_cores, dim_count)
    if maxthreads is not None:  # Cap the number of cores if desired
        cores_to_use = min(cores_to_use, maxthreads)
    parallel = Parallel(n_jobs=cores_to_use)
    use_parallel = cores_to_use > 1

    # Evaluate the initial simplex when starting fresh; otherwise just report status
    if not resume:
        simplex, fvals = _evaluate_and_sort_initial_simplex(
            obj_func, simplex, fvals, N, parallel, use_parallel, verbose, savefreq, name
        )
        evals += N
    elif verbose > 0:
        print(
            "Resuming search after "
            + str(iters)
            + " iterations and "
            + str(evals)
            + " function evaluations.",
        )

    # Initialize some inputs for the multithreader
    j_list = range(N - P, N)
    opt_params = [r_param, c_param, e_param]

    # Run the Nelder-Mead algorithm until a terminal condition is met
    go = True
    while go:
        t_start = time()
        iters += 1
        if verbose > 0:
            print("Beginning iteration #" + str(iters) + " now.")

        # Attempt to update the P worst vertices (or shrink the simplex)
        simplex, fvals, new_evals = _nelder_mead_iteration(
            obj_func,
            simplex,
            fvals,
            N,
            K,
            P,
            j_list,
            opt_params,
            s_param,
            parallel,
            use_parallel,
            verbose,
        )
        evals += new_evals

        # Check the four terminal conditions (iters, evals, ftol, xtol)
        go, fmin, f_dist, x_dist = _check_nm_termination(
            iters, evals, fvals, simplex, N, maxiter, maxeval, ftol, xtol
        )

        t_end = time()
        if verbose > 0:
            t_iter = t_end - t_start
            print(
                "Finished iteration #"
                + str(iters)
                + " with "
                + str(new_evals)
                + " evaluations ("
                + str(evals)
                + " cumulative) in "
                + str(t_iter)
                + " seconds.",
            )
            print(
                "Simplex status: fmin="
                + str(fmin)
                + ", f_dist="
                + str(f_dist)
                + ", x_dist="
                + str(x_dist),
            )

        # Save the progress of the estimation if desired
        if savefreq is not None:
            if (iters % savefreq) == 0:
                save_nelder_mead_data(name, simplex, fvals, iters, evals)
                if verbose > 0:
                    print("Saved search progress in " + name + ".txt")

    # Return the best vertex found (the simplex is kept sorted best-first)
    xopt = simplex[0, :]
    return xopt
def save_nelder_mead_data(name, simplex, fvals, iters, evals):
    """Stores the progress of a parallel Nelder-Mead search in a text file so that
    it can be resumed later (after manual termination or a crash).

    Parameters
    ----------
    name : string
        Name of the txt file in which to store search progress.
    simplex : np.array
        The current state of the simplex of parameter guesses.
    fvals : np.array
        The objective function value at each row of simplex.
    iters : int
        The number of completed Nelder-Mead iterations.
    evals : int
        The cumulative number of function evaluations in the search process.

    Returns
    -------
    None

    """
    # File layout: shape row, (iters, evals) row, one row per simplex vertex,
    # then the function values on the final row.
    with open(name + ".txt", "w", newline="") as f:
        writer = csv.writer(f, delimiter=",")
        writer.writerow(simplex.shape)
        writer.writerow([iters, evals])
        writer.writerows(simplex)  # one CSV row per simplex point
        writer.writerow(fvals)
def load_nelder_mead_data(name):
    """Reads the progress of a parallel Nelder-Mead search from a text file, as
    created by save_nelder_mead_data().

    Parameters
    ----------
    name : string
        Name of the txt file from which to read search progress.

    Returns
    -------
    simplex : np.array
        The current state of the simplex of parameter guesses.
    fvals : np.array
        The objective function value at each row of simplex.
    iters : int
        The number of completed Nelder-Mead iterations.
    evals : int
        The cumulative number of function evaluations in the search process.

    """
    # File layout mirrors save_nelder_mead_data: shape row, counters row,
    # N simplex rows, then the function-value row.
    with open(name + ".txt", newline="") as f:
        reader = csv.reader(f, delimiter=",")

        # Simplex dimensions
        shape_row = next(reader)
        N = int(shape_row[0])
        K = int(shape_row[1])
        simplex = np.zeros((N, K)) + np.nan

        # Iteration and cumulative evaluation counters
        counters = next(reader)
        iters = int(counters[0])
        evals = int(counters[1])

        # One line per simplex vertex, then the function values
        for n in range(N):
            simplex[n, :] = np.array(next(reader), dtype=float)
        fvals = np.array(next(reader), dtype=float)

    return simplex, fvals, iters, evals
def parallel_nelder_mead_worker(obj_func, simplex, f_vals, j, P, opt_params):
    """A worker process for the parallel Nelder-Mead algorithm. Updates one point
    in the simplex, returning its function value as well. Should basically
    never be called directly, only by parallelNelderMead().

    Parameters
    ----------
    obj_func : function
        The function to be minimized; takes a single 1D array as input.
    simplex : numpy.array
        The current simplex for minimization; simplex[k,:] is an input for obj_func.
    f_vals : numpy.array
        The values of the objective function at each point of the simplex:
        f_vals[k] = obj_func(simplex[k,:])
    j : int
        Index of the point in the simplex to update: simplex[j,:]
    P : int
        Degree of parallelization of the algorithm.
    opt_params : numpy.array
        Three element array with parameters for reflection, contraction, expansion.

    Returns
    -------
    new_point : numpy.array
        An updated point for the simplex; might be the same as simplex[j,:].
    new_val : float
        The value of the objective function at the new point: obj_func(new_point).
    evals : int
        Number of evaluations of obj_func by this worker.

    """
    # Unpack the reflection, contraction, and expansion parameters
    alpha, beta, gamma = opt_params[0], opt_params[1], opt_params[2]
    my_point = simplex[j, :]  # vertex this worker is updating
    my_val = f_vals[j]  # objective value at that vertex
    best_val = f_vals[0]  # best value in the simplex
    next_val = f_vals[j - 1]  # value at the next best vertex
    evals = 0

    # Centroid of the "good" points (everything outside the P worst)
    N = simplex.shape[0]
    centroid = np.mean(simplex[0 : (N - P), :], axis=0)

    # Reflect the vertex through the centroid and evaluate there
    r_point = centroid + alpha * (centroid - my_point)
    r_val = obj_func(r_point)
    evals += 1

    # Case 1: reflection beats the best point, so try expanding further
    if r_val < best_val:
        e_point = r_point + gamma * (r_point - centroid)
        e_val = obj_func(e_point)
        evals += 1
        if e_val < r_val:
            return e_point, e_val, evals
        return r_point, r_val, evals

    # Case 2: reflection beats the next best point, so keep it as-is
    if r_val < next_val:
        return r_point, r_val, evals

    # Case 3: reflection is no better than the next best point; contract
    # from whichever of {reflection, original vertex} is better.
    if r_val < my_val:
        temp_point, temp_val = r_point, r_val
    else:
        temp_point, temp_val = my_point, my_val
    c_point = temp_point + beta * (centroid - temp_point)
    c_val = obj_func(c_point)
    evals += 1
    if c_val < temp_val:
        return c_point, c_val, evals
    return temp_point, temp_val, evals
def estimate_msm(
    agent: AgentType,
    params: dict,
    empirical_moments: dict,
    moments_cov: dict | np.ndarray,
    simulate_moments: callable,
    optimize_options: dict,
    simulate_moments_kwargs: dict = None,
    weights: str = "diagonal",
    estimagic_options: dict = None,
):
    """Use the method of simulated moments to estimate a model.

    Parameters
    ----------
    agent : AgentType
        Agent instance injected into simulate_moments_kwargs as "agent" unless
        the caller already supplied one.
    params : dict
        Starting parameter values for the estimation.
    empirical_moments : dict
        Empirical moments the simulated moments are matched to.
    moments_cov : dict or np.ndarray
        Covariance of the empirical moments.
    simulate_moments : callable
        Function that simulates moments from candidate parameters.
    optimize_options : dict
        Options forwarded to estimagic's optimizer.
    simulate_moments_kwargs : dict, optional
        Extra keyword arguments for simulate_moments; copied, never mutated.
    weights : str
        Weighting scheme for the moment-distance criterion (default "diagonal").
    estimagic_options : dict, optional
        Additional keyword arguments forwarded to em.estimate_msm. Defaults to
        no extra options (was a mutable default ``{}``, shared across calls).

    Returns
    -------
    res
        The estimation result object returned by em.estimate_msm.
    """
    # Copy so the caller's dict is never mutated by the setdefault below
    simulate_moments_kwargs = dict(simulate_moments_kwargs or {})
    simulate_moments_kwargs.setdefault("agent", agent)
    estimagic_options = estimagic_options or {}

    res = em.estimate_msm(
        simulate_moments,
        empirical_moments,
        moments_cov,
        params,
        optimize_options=optimize_options,
        simulate_moments_kwargs=simulate_moments_kwargs,
        weights=weights,
        **estimagic_options,
    )

    return res