# requires the Distributed standard library for @everywhere on Julia >= 0.7
# (assumed missing here; harmless if the calling script already loads it)
using Distributed

# gradient descent: find a local minimum of a function of one variable from an initial guess
# the derivative is estimated numerically by a forward finite difference
@everywhere function gradient_descent(
    f::Function,
    x0::Float64,
    delta::Float64, # the step is -delta*df
    dx::Float64, # finite difference used for the numerical derivative
    maxiter::Int64 # interrupt and fail after maxiter steps
)
    counter=0
    # init
    x=x0
    while counter<maxiter
        # value at x and at the neighboring points x-dx and x+dx
        val=f(x)
        valm=f(x-dx)
        valp=f(x+dx)
        # stop if x is a local minimum at the resolution dx
        if val<valm && val<valp
            return(x,val)
        end
        # forward-difference approximation of the derivative
        df=(valp-val)/dx
        # gradient step
        x=x-delta*df
        counter+=1
    end
    # fail: no minimum found within maxiter steps
    return(Inf,Inf)
end
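# Illustrative usage sketch (not part of the original code): minimize the
# quadratic (x-2)^2, whose minimum sits at x=2. The function name and all
# parameter values below (x0=3., delta=1e-1, dx=1e-6, maxiter=1000) are
# arbitrary choices for this example.
function example_gradient_descent()
    (xmin,fmin)=gradient_descent(x->(x-2.)^2,3.,1e-1,1e-6,1000)
    # xmin should land within about dx of 2.0, and fmin close to 0.0
    return(xmin,fmin)
end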
# Newton-type algorithm to find a local maximum of a function of one variable
# the derivatives are estimated numerically by finite differences
@everywhere function newton_maximum(
    f::Function,
    x0::Float64,
    dx::Float64, # finite difference used for the numerical derivatives
    maxiter::Int64, # interrupt and fail after maxiter steps
    tolerance::Float64, # stop when |f'/f''| drops below tolerance
    maxstep::Float64 # maximal size of a single step
)
    counter=0
    # init
    x=x0
    while counter<maxiter
        # value at x and at the neighboring points x-dx and x+dx
        val=f(x)
        valm=f(x-dx)
        valp=f(x+dx)
        # forward and backward difference approximations of the derivative
        dfp=(valp-val)/dx
        dfm=(val-valm)/dx
        # second derivative (central difference)
        ddf=(dfp-dfm)/dx
        #@printf(stderr,"% .15e % .15e % .15e % .15e\n",x,val,dfp,ddf)
        if abs(dfp/ddf)<tolerance
            # converged: check that this is a local maximum
            if ddf<0
                return(x,val)
            else
                return(Inf,Inf)
            end
        end
        # Newton-like step; dividing by |f''| keeps the step pointing uphill even where f''>0
        step=dfp/abs(ddf)
        # clip steps that are too large
        if abs(step)>maxstep
            step=maxstep*sign(step)
        end
        x=x+step
        # fail if the iteration runs off to infinity
        if x==Inf || x==-Inf
            return(x,val)
        end
        counter+=1
    end
    # fail: no maximum found within maxiter steps
    return(Inf,Inf)
end
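# Illustrative usage sketch (not part of the original code): maximize
# -(x-1)^2, whose maximum sits at x=1. The function name and the parameter
# values below (x0=3., dx=1e-6, maxiter=100, tolerance=1e-6, maxstep=1.) are
# arbitrary choices for this example; starting from x0=3. the first steps are
# clipped to maxstep before the Newton iteration converges.
function example_newton_maximum()
    (xmax,fmax)=newton_maximum(x->-(x-1.)^2,3.,1e-6,100,1e-6,1.)
    # xmax should land within a few times the tolerance of 1.0, and fmax close to 0.0
    return(xmax,fmax)
end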