minchoi02 / FFT

FFT matplot
0 stars 0 forks source link

FFT Optimization, Minimization of scalar function of one or more variables using the Nelder-Mead algorithm #3

Open minchoi02 opened 4 years ago

minchoi02 commented 4 years ago

import numpy as np
from scipy.optimize import minimize

def rosen(x):
    """The Rosenbrock function.

    f(x) = sum_i [ 100*(x[i+1] - x[i]**2)**2 + (1 - x[i])**2 ]

    Parameters
    ----------
    x : ndarray
        Point at which to evaluate (1-D array, length >= 2).

    Returns
    -------
    float
        Function value; the global minimum is 0 at x = (1, ..., 1).
    """
    # The rendered original lost the ** operators ("x[:-1]2.0");
    # restored to the standard definition.
    return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0)

def rosen_der(x):
    """Gradient of the Rosenbrock function.

    Parameters
    ----------
    x : ndarray
        Point at which to evaluate (1-D array, length >= 2).

    Returns
    -------
    ndarray
        Gradient vector, same shape as ``x``.
    """
    # Interior points contribute to three gradient components;
    # the endpoints are handled separately below.
    xm = x[1:-1]
    xm_m1 = x[:-2]   # left neighbours
    xm_p1 = x[2:]    # right neighbours
    der = np.zeros_like(x)
    # The rendered original lost most * and ** operators; restored.
    der[1:-1] = (200*(xm - xm_m1**2)
                 - 400*(xm_p1 - xm**2)*xm
                 - 2*(1 - xm))
    der[0] = -400*x[0]*(x[1] - x[0]**2) - 2*(1 - x[0])
    der[-1] = 200*(x[-1] - x[-2]**2)
    return der

def rosen_hess(x):
    """Hessian matrix of the Rosenbrock function.

    Parameters
    ----------
    x : array_like
        Point at which to evaluate (1-D, length >= 2).

    Returns
    -------
    ndarray
        Symmetric tridiagonal (n, n) Hessian.
    """
    x = np.asarray(x)
    # Off-diagonal terms: d2f/dx_i dx_{i+1} = -400*x_i.
    # The rendered original lost * and ** operators; restored.
    H = np.diag(-400*x[:-1], 1) + np.diag(-400*x[:-1], -1)
    diagonal = np.zeros_like(x)
    diagonal[0] = 1200*x[0]**2 - 400*x[1] + 2
    diagonal[-1] = 200
    diagonal[1:-1] = 202 + 1200*x[1:-1]**2 - 400*x[2:]
    H = H + np.diag(diagonal)
    return H

def rosen_hess_p(x, p):
    """Hessian of the Rosenbrock function times an arbitrary vector.

    Computes H(x) @ p without forming the full Hessian — useful for
    Newton-CG / trust-region solvers that only need Hessian-vector
    products.

    Parameters
    ----------
    x : array_like
        Point at which to evaluate (1-D, length >= 2).
    p : ndarray
        Vector to multiply, same length as ``x``.

    Returns
    -------
    ndarray
        The product H(x) @ p, same shape as ``x``.
    """
    x = np.asarray(x)
    Hp = np.zeros_like(x)
    # Tridiagonal structure: each component mixes p[i-1], p[i], p[i+1].
    # The rendered original lost * and ** operators; restored.
    Hp[0] = (1200*x[0]**2 - 400*x[1] + 2)*p[0] - 400*x[0]*p[1]
    Hp[1:-1] = (-400*x[:-2]*p[:-2]
                + (202 + 1200*x[1:-1]**2 - 400*x[2:])*p[1:-1]
                - 400*x[1:-1]*p[2:])
    Hp[-1] = -400*x[-2]*p[-2] + 200*p[-1]
    return Hp

# Common starting point for all solvers (5-D Rosenbrock problem).
x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

# NOTE(review): the original then re-assigned
#   x0 = np.zeros([-1, 2, 3, 1, 5])
# which raises ValueError (negative dimension is not allowed) and would
# abort the script before any solver ran; removed.

# Derivative-free simplex search.  Nelder-Mead's absolute x-tolerance
# option is spelled 'xatol' (the original's 'xtol' is an unknown option
# for this method and is ignored with a warning).
res = minimize(rosen, x0, method='nelder-mead',
               options={'xatol': 1e-8, 'disp': True})
print('nelder-mead:', res.x)

# Quasi-Newton with analytic gradient.
res = minimize(rosen, x0, method='BFGS', jac=rosen_der,
               options={'disp': True})
print('BFGS:', res.x)

# Newton-CG with the full analytic Hessian.
res = minimize(rosen, x0, method='Newton-CG', jac=rosen_der,
               hess=rosen_hess,
               options={'xtol': 1e-8, 'disp': True})
print('Newton-CG with Hessian:', res.x)

# Newton-CG using only Hessian-vector products.
res = minimize(rosen, x0, method='Newton-CG', jac=rosen_der,
               hessp=rosen_hess_p,
               options={'xtol': 1e-8, 'disp': True})
print('Newton-CG with Hessian product:', res.x)

# Trust-region Newton-CG, full Hessian.
res = minimize(rosen, x0, method='trust-ncg', jac=rosen_der,
               hess=rosen_hess,
               options={'gtol': 1e-8, 'disp': True})
print('trust-ncg with Hessian:', res.x)

# Trust-region Newton-CG, Hessian-vector products only.
res = minimize(rosen, x0, method='trust-ncg', jac=rosen_der,
               hessp=rosen_hess_p,
               options={'gtol': 1e-8, 'disp': True})
print('trust-ncg with Hessian product:', res.x)

# Trust-region method based on the GLTR Krylov subspace solver.
res = minimize(rosen, x0, method='trust-krylov', jac=rosen_der,
               hess=rosen_hess,
               options={'gtol': 1e-8, 'disp': True})

res = minimize(rosen, x0, method='trust-krylov', jac=rosen_der,
               hessp=rosen_hess_p,
               options={'gtol': 1e-8, 'disp': True})

import scipy.optimize as optimize

# Print the documented options for the Nelder-Mead solver.
optimize.show_options(solver='minimize', method='nelder-mead')

minchoi02 commented 4 years ago

Sample solver output:

    Optimization terminated successfully.
    Current function value: 0.000000
    Iterations: 19
    Function evaluations: 20
    Gradient evaluations: 20
    Hessian evaluations: 64

Minimization of a scalar function of one or more variables using the Nelder-Mead algorithm.