# gradient descent: find local minimum of function of one variable from initial guess
# numerically estimate the derivative
#
# Returns a tuple:
#   (x, val)    on convergence (|step| below `tol`) or if the iterate escapes
#               to +/-Inf (caller detects failure via isfinite(x)),
#   (Inf, Inf)  if no convergence within `maxiter` steps.
#
# NOTE(review): the original text of this function was mangled (the loop
# condition, derivative estimate, and convergence test were stripped, and
# `maxstep`/`val` were left undefined). The loop body below is a
# reconstruction consistent with the surviving skeleton; `maxstep` and `tol`
# are exposed as keyword arguments with defaults — confirm both the defaults
# and the convergence criterion against the original source.
@everywhere function gradient_descent(
        f::Function,
        x0::Float64,
        delta::Float64,        # shift is delta*df
        dx::Float64,           # finite difference for numerical derivative evaluation
        maxiter::Int64;        # interrupt and fail after maxiter steps
        maxstep::Float64=1.0,  # cap on the magnitude of a single step
        tol::Float64=1e-10     # declare convergence when |step| < tol
    )
    # init
    x = x0
    counter = 0
    while counter < maxiter
        val = f(x)
        # central-difference estimate of f'(x)
        df = (f(x + dx) - f(x - dx)) / (2 * dx)
        step = -delta * df
        # clamp runaway steps to maxstep
        if abs(step) > maxstep
            step = maxstep * sign(step)
        end
        # converged: step is negligible
        if abs(step) < tol
            return (x, val)
        end
        x = x + step
        # fail if off to infinity (val is the last finite function value seen)
        if x == Inf || x == -Inf
            return (x, val)
        end
        counter += 1
    end
    # fail: no convergence within maxiter steps
    return (Inf, Inf)
end