Diffstat (limited to 'src/optimization.jl')
-rw-r--r-- | src/optimization.jl | 94
1 file changed, 94 insertions, 0 deletions
diff --git a/src/optimization.jl b/src/optimization.jl
new file mode 100644
index 0000000..bacead7
--- /dev/null
+++ b/src/optimization.jl
@@ -0,0 +1,94 @@
+# gradient descent: find local minimum of function of one variable from initial guess
+# numerically estimate the derivative
+@everywhere function gradient_descent(
+  f::Function,
+  x0::Float64,
+  delta::Float64, # shift is delta*df
+  dx::Float64, # finite difference for numerical derivative evaluation
+  maxiter::Int64 # interrupt and fail after maxiter steps
+)
+  counter=0
+
+  # init
+  x=x0
+
+  while counter<maxiter
+    # value at x and around
+    val=f(x)
+    valm=f(x-dx)
+    valp=f(x+dx)
+    # quit if minimum
+    if(val<valm && val<valp)
+      return(x,val)
+    end
+
+    # derivative
+    df=(valp-val)/dx
+    # step
+    x=x-delta*df
+    counter+=1
+  end
+
+  # fail
+  return(Inf,Inf)
+end
+
+# Newton algorithm to compute extrema
+# numerically estimate the derivatives
+@everywhere function newton_maximum(
+  f::Function,
+  x0::Float64,
+  dx::Float64, # finite difference for numerical derivative evaluation
+  maxiter::Int64,
+  tolerance::Float64,
+  maxstep::Float64 # maximal size of step
+)
+  counter=0
+
+  # init
+  x=x0
+
+  while counter<maxiter
+    # value at x and around
+    val=f(x)
+    valm=f(x-dx)
+    valp=f(x+dx)
+
+    # derivative
+    dfp=(valp-val)/dx
+    dfm=(val-valm)/dx
+    # second derivative
+    ddf=(dfp-dfm)/dx
+
+    #@printf(stderr,"% .15e % .15e % .15e % .15e\n",x,val,dfp,ddf)
+
+    if abs(dfp/ddf)<tolerance
+      # check it is a local maximum
+      if ddf<0
+        return(x,val)
+      else
+        return(Inf,Inf)
+      end
+    end
+
+    # step
+    step=dfp/abs(ddf)
+
+    # step too large
+    if abs(step)>maxstep
+      step=maxstep*sign(step)
+    end
+
+    x=x+step
+
+    # fail if off to infinity
+    if x==Inf || x==-Inf
+      return(x,val)
+    end
+    counter+=1
+  end
+
+  # fail
+  return(Inf,Inf)
+end
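
For context, a minimal usage sketch of gradient_descent on a hypothetical test function (not part of the commit). The loop quits as soon as f(x) < f(x-dx) and f(x) < f(x+dx), i.e. once x sits within dx of the minimum on the sampling grid; on Julia >= 0.7 the @everywhere macro lives in the Distributed stdlib, so `using Distributed` must run before the file is included.

    # sketch only: assumes src/optimization.jl has been included
    f(x) = (x - 2.0)^2
    # arguments: f, x0, delta, dx, maxiter
    (x, val) = gradient_descent(f, 4.0, 0.1, 1e-6, 1000)
    # converges to x ~ 2, val ~ 0; a return of (Inf, Inf) means maxiter
    # was exhausted before the discrete minimum test held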
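A note on the numerics in newton_maximum: the two one-sided differences combine into the standard second difference, ddf = (dfp-dfm)/dx = (f(x+dx) - 2 f(x) + f(x-dx))/dx^2, and dividing the step by abs(ddf) rather than ddf keeps the update moving uphill along dfp even where the curvature is positive, so the iteration seeks maxima specifically; the ddf<0 check at convergence then rejects any stationary point that is not a local maximum. A minimal usage sketch with a hypothetical concave quadratic (not from the commit):

    g(x) = 3.0 - (x - 1.0)^2
    # arguments: f, x0, dx, maxiter, tolerance, maxstep
    (xm, vm) = newton_maximum(g, 0.0, 1e-6, 100, 1e-6, 1.0)
    # converges to xm ~ 1, vm ~ 3 in a couple of steps; (Inf, Inf)
    # signals a non-maximum stationary point or maxiter exhausted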