Skip to content

Commit 305f7d5

Browse files
authored
Fix for new AIBECS version (#10)
* Fix for new AIBECS version * Fix email * Fix ReadMe * Deploy docs in PRs
1 parent 59b7716 commit 305f7d5

11 files changed

+158
-133
lines changed

.github/workflows/CompatHelper.yml

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
name: CompatHelper
2+
on:
3+
schedule:
4+
- cron: '00 00 * * *'
5+
workflow_dispatch:
6+
jobs:
7+
CompatHelper:
8+
runs-on: ubuntu-latest
9+
steps:
10+
- name: Pkg.add("CompatHelper")
11+
run: julia -e 'using Pkg; Pkg.add("CompatHelper")'
12+
- name: CompatHelper.main()
13+
env:
14+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
15+
COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }}
16+
run: julia -e 'using CompatHelper; CompatHelper.main()'

Project.toml

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "F1Method"
22
uuid = "d5605bae-6d76-11e9-2fd7-71bcf42edbaa"
3-
authors = ["Benoit Pasquier <pasquieb@uci.edu>"]
4-
version = "0.3.2"
3+
authors = ["Benoit Pasquier <briochemc@gmail.com>"]
4+
version = "0.4.0"
55

66
[deps]
77
DiffEqBase = "2b5f629d-d688-5b77-993f-72d75c75574e"
@@ -11,7 +11,7 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
1111
[compat]
1212
DiffEqBase = "6"
1313
ForwardDiff = "0.10"
14-
julia = "1.0"
14+
julia = "1"
1515

1616
[extras]
1717
AIBECS = "ace601d6-714c-11e9-04e5-89b7fad23838"

README.md

+15-11
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
<p>
77
<a href="https://briochemc.github.io/F1Method.jl/stable/">
8-
<img src=https://img.shields.io/badge/docs-stable-important.svg?style=flat-square&label=Documentation&logo=Read%20the%20Docs>
8+
<img src="https://img.shields.io/github/workflow/status/briochemc/F1Method.jl/Documentation?style=for-the-badge&label=Documentation&logo=Read%20the%20Docs&logoColor=white">
99
</a>
1010
</p>
1111

@@ -19,8 +19,14 @@
1919
</p>
2020

2121
<p>
22-
<a href="https://travis-ci.com/briochemc/F1Method.jl">
23-
<img alt="Build Status" src="https://img.shields.io/travis/com/briochemc/F1Method.jl/master?label=OSX/Linux/Windows&logo=travis&logocolor=white&style=flat-square">
22+
<a href="https://github.com/briochemc/F1Method.jl/actions">
23+
<img src="https://img.shields.io/github/workflow/status/briochemc/F1Method.jl/Mac%20OS%20X?label=OSX&logo=Apple&logoColor=white&style=flat-square">
24+
</a>
25+
<a href="https://github.com/briochemc/F1Method.jl/actions">
26+
<img src="https://img.shields.io/github/workflow/status/briochemc/F1Method.jl/Linux?label=Linux&logo=Linux&logoColor=white&style=flat-square">
27+
</a>
28+
<a href="https://github.com/briochemc/F1Method.jl/actions">
29+
<img src="https://img.shields.io/github/workflow/status/briochemc/F1Method.jl/Windows?label=Windows&logo=Windows&logoColor=white&style=flat-square">
2430
</a>
2531
<a href="https://codecov.io/gh/briochemc/F1Method.jl">
2632
<img src="https://img.shields.io/codecov/c/github/briochemc/F1Method.jl/master?label=Codecov&logo=codecov&logoColor=white&style=flat-square">
@@ -75,17 +81,16 @@ Once initial values for the state, `x`, and parameters, `p`, are chosen, simply
7581

7682
```julia
7783
# Initialize the cache for storing reusable objects
78-
mem = F1Method.initialize_mem(x, p)
84+
mem = initialize_mem(F, ∇ₓf, ∇ₓF, x, p, alg; options...)
7985
```
8086

8187
wrap the functions into functions of `p` only via
8288

8389
```julia
84-
8590
# Wrap the objective, gradient, and Hessian functions
86-
objective(p) = F1Method.objective(f, F, ∇ₓF, mem, p, myAlg(); my_options...)
87-
gradient(p) = F1Method.gradient(f, F, ∇ₓf, ∇ₓF, mem, p, myAlg(); my_options...)
88-
hessian(p) = F1Method.hessian(f, F, ∇ₓf, ∇ₓF, mem, p, myAlg(); my_options...)
91+
objective(p) = F1Method.objective(f, F, ∇ₓF, mem, p, alg; options...)
92+
gradient(p) = F1Method.gradient(f, F, ∇ₓf, ∇ₓF, mem, p, alg; options...)
93+
hessian(p) = F1Method.hessian(f, F, ∇ₓf, ∇ₓF, mem, p, alg; options...)
8994
```
9095

9196
and compute the objective, gradient, or Hessian via either of
@@ -101,10 +106,9 @@ hessian(p)
101106
That's it.
102107
You were told it was simple, weren't you?
103108
Now you can test how fast and accurate it is!
104-
(Or trust our published work, *[Pasquier and Primeau](https://www.bpasquier.com/publication/pasquier_primeau_sisc_2019/)* (in preparation).)
105109

106110
## Citing the software
107111

108112
If you use this package, or implement your own package based on the F-1 algorithm please cite us.
109-
If you use the F-1 algorithm, please cite *[Pasquier and Primeau](https://www.bpasquier.com/publication/pasquier_primeau_sisc_2019/)* (in review for publication in the *SIAM Journal on Scientific Computing*).
110-
If you also use this package directly, please cite us using the [CITATION.bib](./CITATION.bib), which contains a bibtex entry for the software.
113+
If you use the F-1 algorithm, please cite *[Pasquier and Primeau](https://www.bpasquier.com/publication/pasquier_primeau_sisc_2019/)* (in prep.).
114+
If you also use this package directly, please cite it! (Use [the Zenodo link](https://doi.org/10.5281/zenodo.2667835) or the [CITATION.bib file](./CITATION.bib), which contains a bibtex entry.)

docs/Project.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,4 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
44
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
55

66
[compat]
7-
Documenter = "~0.24"
7+
Documenter = "~0.25"

docs/make.jl

+1
Original file line numberDiff line numberDiff line change
@@ -9,4 +9,5 @@ makedocs(
99

1010
deploydocs(
1111
repo = "github.com/briochemc/F1Method.jl.git",
12+
push_preview = true
1213
)

docs/src/index.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ Finally, we wrap the objective, gradient, and Hessian functions defined by the F
123123

124124
```jldoctest usage
125125
# Initialize the cache for storing reusable objects
126-
mem = F1Method.initialize_mem(x₀, p₀)
126+
mem = F1Method.initialize_mem(F, ∇ₓf, ∇ₓF, x₀, p₀, MyAlg())
127127
# Define the functions via the F1 method
128128
F1_objective(p) = F1Method.objective(f, F, ∇ₓF, mem, p, MyAlg())
129129
F1_gradient(p) = F1Method.gradient(f, F, ∇ₓf, ∇ₓF, mem, p, MyAlg())

src/F1Method.jl

+27-21
Original file line numberDiff line numberDiff line change
@@ -21,32 +21,32 @@ Contains
2121
- `p` the parameters 𝒑
2222
The `Mem`-type object should be initialized with `initialize_mem`.
2323
"""
24-
mutable struct Mem
25-
s # 𝒔(𝒑)
26-
A # factors of 𝐀 = ∇ₓ𝑭(𝒔,𝒑)
27-
∇s # ∇𝒔(𝒑)
28-
∇ₓf # ∇ₓ𝑓(𝒔,𝒑)
29-
p # 𝒑
24+
mutable struct Mem{Ts, TA, T∇s, T∇ₓf, Tp}
25+
s::Ts # 𝒔(𝒑)
26+
A::TA # factors of 𝐀 = ∇ₓ𝑭(𝒔,𝒑)
27+
∇s::T∇s # ∇𝒔(𝒑)
28+
∇ₓf::T∇ₓf # ∇ₓ𝑓(𝒔,𝒑)
29+
p::Tp # 𝒑 that matches all but 𝒔(𝒑)
30+
psol::Tp # 𝒑 that matches 𝒔(𝒑)
3031
end
3132

3233

3334
function update_mem!(f, F, ∇ₓf, ∇ₓF, mem, p, alg; options...)
3435
if p ≠ mem.p # only update mem if 𝒑 has changed
3536
update_solution!(F, ∇ₓF, mem, p, alg; options...)
36-
s, m = mem.s.u, length(p)
37-
∇ₚF = ForwardDiff.jacobian(λ -> F(s,p+λ), zeros(m))
38-
mem.A = factorize(∇ₓF(s,p)) # update factors of ∇ₓ𝑭(𝒔,𝒑)
39-
mem.∇s .= mem.A \ -∇ₚF # update ∇𝒔
40-
mem.∇ₓf .= ∇ₓf(s,p) # update ∇ₓ𝑓(𝒔,𝒑)
41-
mem.p = p # update 𝒑
37+
∇ₚF = ForwardDiff.jacobian(p -> F(mem.s, p), p)
38+
mem.A = factorize(∇ₓF(mem.s, p)) # update factors of ∇ₓ𝑭(𝒔,𝒑)
39+
mem.∇s .= mem.A \ -∇ₚF # update ∇𝒔
40+
mem.∇ₓf .= ∇ₓf(mem.s, p) # update ∇ₓ𝑓(𝒔,𝒑)
41+
mem.p .= p # update 𝒑 for the variables above
4242
end
4343
end
4444

4545
function update_solution!(F, ∇ₓF, mem, p, alg; options...)
46-
if ~(mem.s isa SteadyStateSolution) || p ≠ mem.s.prob.p
47-
mem.s isa SteadyStateSolution ? x = mem.s.u : x = mem.s
48-
prob = SteadyStateProblem(F, ∇ₓF, x, p) # define problem
49-
mem.s = solve(prob, alg; options...) # update 𝒔
46+
if p ≠ mem.psol
47+
prob = SteadyStateProblem(F, ∇ₓF, mem.s, p) # define problem
48+
mem.s .= solve(prob, alg; options...).u # update 𝒔
49+
mem.psol .= p # update 𝒑 for 𝒔
5050
end
5151
end
5252

@@ -63,7 +63,7 @@ The Jacobian, `∇ₓF`, and the memory cache `mem` must be supplied.
6363
"""
6464
function objective(f, F, ∇ₓF, mem, p, alg; options...)
6565
update_solution!(F, ∇ₓF, mem, p, alg; options...)
66-
return f(mem.s,p)
66+
return f(mem.s, p)
6767
end
6868

6969
"""
@@ -74,7 +74,7 @@ Returns the gradient of the `objective` function using the F-1 method.
7474
function gradient(f, F, ∇ₓf, ∇ₓF, mem, p, alg; options...)
7575
update_mem!(f, F, ∇ₓf, ∇ₓF, mem, p, alg; options...)
7676
s, ∇s, m = mem.s, mem.∇s, length(p)
77-
∇ₚf = ForwardDiff.jacobian(λ -> [f(s,p+λ)], zeros(m))
77+
∇ₚf = ForwardDiff.jacobian(p -> [f(s,p)], p)
7878
return mem.∇ₓf * ∇s + ∇ₚf
7979
end
8080

@@ -96,9 +96,15 @@ end
9696
9797
Initializes the memory cache for the F-1 method.
9898
"""
99-
function initialize_mem(x, p)
100-
n, m = length(x), length(p)
101-
return Mem(copy(x), nothing, zeros(n,m), zeros(1,n), nothing)
99+
function initialize_mem(F, ∇ₓf, ∇ₓF, x, p, alg; options...)
100+
x = copy(x)
101+
p = copy(p)
102+
psol = copy(p)
103+
prob = SteadyStateProblem(F, ∇ₓF, x, p)
104+
s = solve(prob, alg; options...).u
105+
A = factorize(∇ₓF(s, p))
106+
∇ₚF = ForwardDiff.jacobian(p -> F(s, p), p)
107+
return Mem(s, A, A \ -∇ₚF, ∇ₓf(s, p), p, psol)
102108
end
103109

104110
end

test/AIBECS_test.jl

+32-41
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,30 @@
11

22
# This AIBECS test is derived from the P-model tutorial in AIBECS.jl
33
# But it uses one of the small circulations to test derivatives of the cost function
4+
module SubModuleAIBECS
45

6+
using Test
7+
using F1Method
8+
using AIBECS
9+
import AIBECS: @units, units
10+
import AIBECS: @limits, limits
11+
import AIBECS: @initial_value, initial_value
12+
import AIBECS: @flattenable, flattenable, flatten
13+
import AIBECS: @prior, prior
14+
using Unitful: m, d, s, yr, Myr, mol, mmol, μmol, μM
15+
using Distributions
16+
using WorldOceanAtlasTools
17+
using FiniteDiff
518

619
# AIBECS model
720
grd, T_Circ = Primeau_2x2x2.load()
821
T_DIP(p) = T_Circ
922
T_POP(p) = transportoperator(grd, z -> w(z,p))
1023
function w(z,p)
1124
@unpack w₀, w′ = p
12-
return @. w₀ + w′ * z
25+
w₀ + w′ * z
1326
end
14-
const z_top = topdepthvec(grd) # uptake only in top layer
27+
z_top = topdepthvec(grd) # uptake only in top layer
1528
function U(x,p)
1629
@unpack σ, τ_DIP, k = p
1730
return @. σ * x/τ_DIP * x/(x+k) * (z_top==0) * (x≥0)
@@ -28,11 +41,8 @@ function G_POP(DIP, POP, p)
2841
@unpack τ_geo = p
2942
return @. $U(DIP,p) - $R(POP,p) - POP / τ_geo
3043
end
31-
import AIBECS: @units, units
32-
import AIBECS: @limits, limits
33-
import AIBECS: @initial_value, initial_value
34-
import AIBECS: @flattenable, flattenable, flatten
35-
const ∞ = Inf
44+
45+
∞ = Inf
3646
@initial_value @units @flattenable @limits struct PmodelParameters{U} <: AbstractParameters{U}
3747
w₀::U | 0.64 | m/d | true | (0,∞)
3848
w′::U | 0.13 | m/d/m | true | (0,∞)
@@ -43,7 +53,6 @@ const ∞ = Inf
4353
DIP_geo::U | 2.12 | mmol/m^3 | true | (-∞,∞)
4454
σ::U | 0.3 | NoUnits | true | (0,1)
4555
end
46-
import AIBECS: @prior, prior
4756
function prior(::Type{T}, s::Symbol) where {T<:AbstractParameters}
4857
if flattenable(T, s)
4958
lb, ub = limits(T, s)
@@ -65,12 +74,13 @@ prior(::T, s::Symbol) where {T<:AbstractParameters} = prior(T,s)
6574
prior(::Type{T}) where {T<:AbstractParameters} = Tuple(prior(T,s) for s in AIBECS.symbols(T))
6675
prior(::T) where {T<:AbstractParameters} = prior(T)
6776
p = PmodelParameters()
77+
λ = p2λ(p)
6878
nb = sum(iswet(grd))
69-
𝐹, ∇ₓ𝐹 = state_function_and_Jacobian((T_DIP, T_POP), (G_DIP, G_POP), nb)
79+
F, ∇ₓF = F_and_∇ₓF((T_DIP, T_POP), (G_DIP, G_POP), nb, PmodelParameters)
7080
@unpack DIP_geo = p
7181
x = DIP_geo * ones(2nb) # initial guess
72-
prob = SteadyStateProblem(𝐹, ∇ₓ𝐹, x, p)
73-
sol = solve(prob, CTKAlg()).u
82+
prob = SteadyStateProblem(F, ∇ₓF, x, p)
83+
sol = solve(prob, CTKAlg())
7484

7585
# Observations from World Ocean Atlas used to evaluate the
7686
# AIBECS model mismatch with observations
@@ -81,42 +91,23 @@ modify(DIP, POP) = (DIP,)
8191
ωs = (1.0,) # the weight for the mismatch (weight of POP = 0)
8292
ωp = 1.0 # the weight for the parameters prior estimates
8393
obs = (DIPobs,)
84-
𝑓, ∇ₓ𝑓, ∇ₚ𝑓 = generate_objective_and_derivatives(ωs, ωp, grd, modify, obs)
94+
f, ∇ₓf = f_and_∇ₓf(ωs, ωp, grd, modify, obs, PmodelParameters)
8595

8696
# Now we apply the F1 method
87-
mem = F1Method.initialize_mem(x, p)
88-
objective(p) = F1Method.objective(𝑓, 𝐹, ∇ₓ𝐹, mem, p, CTKAlg(), τstop=ustrip(u"s", 1e3u"Myr"))
89-
gradient(p) = F1Method.gradient(𝑓, 𝐹, ∇ₓ𝑓, ∇ₓ𝐹, mem, p, CTKAlg(), τstop=ustrip(u"s", 1e3u"Myr"))
90-
hessian(p) = F1Method.hessian(𝑓, 𝐹, ∇ₓ𝑓, ∇ₓ𝐹, mem, p, CTKAlg(), τstop=ustrip(u"s", 1e3u"Myr"))
91-
92-
# and convert p::PmodelParameters to λ::Vector according to AIBECS change of variables
93-
λ2p = subfun(typeof(p))
94-
∇λ2p = ∇subfun(typeof(p))
95-
∇²λ2p = ∇²subfun(typeof(p))
96-
p2λ = invsubfun(typeof(p))
97-
λ = p2λ(p)
98-
function obj(λ)
99-
return objective(λ2p(λ))
100-
end
101-
function grad(λ)
102-
return gradient(λ2p(λ)) * Diagonal(vec(∇λ2p(λ)))
103-
end
104-
function hess(λ)
105-
p = λ2p(λ)
106-
∇p = Diagonal(vec(∇λ2p(λ)))
107-
∇²p = ∇²λ2p(λ)
108-
G = gradient(p)
109-
H = hessian(p)
110-
return ∇p * H * ∇p + Diagonal(vec(G)) * ∇²p
111-
end
97+
τ = ustrip(u"s", 1e3u"Myr")
98+
mem = F1Method.initialize_mem(F, ∇ₓf, ∇ₓF, x, λ, CTKAlg(), τstop=τ)
99+
objective(λ) = F1Method.objective(f, F, ∇ₓF, mem, λ, CTKAlg(), τstop=τ)
100+
gradient(λ) = F1Method.gradient(f, F, ∇ₓf, ∇ₓF, mem, λ, CTKAlg(), τstop=τ)
101+
hessian(λ) = F1Method.hessian(f, F, ∇ₓf, ∇ₓF, mem, λ, CTKAlg(), τstop=τ)
112102

113103
# Finally we test the result with the "reliable" FiniteDiff :)
114-
λ = p2λ(p)
115-
@test FiniteDiff.finite_difference_gradient(obj, λ) grad(λ)' rtol=1e-5
116-
@test FiniteDiff.finite_difference_hessian(obj, λ) hess(λ) rtol=1e-3
117-
104+
@test FiniteDiff.finite_difference_gradient(objective, 2λ) ≈ gradient(2λ)' rtol=1e-3
105+
@test FiniteDiff.finite_difference_hessian(objective, 2λ) ≈ hessian(2λ) rtol=1e-3
106+
@test FiniteDiff.finite_difference_gradient(objective, λ) ≈ gradient(λ)' rtol=1e-3
107+
@test FiniteDiff.finite_difference_hessian(objective, λ) ≈ hessian(λ) rtol=1e-3
118108

119109

120110

121111

112+
end #submodule
122113

0 commit comments

Comments
 (0)