ENH: special object for decision rules. #126

Closed · wants to merge 2 commits
2 changes: 1 addition & 1 deletion src/Dolo.jl
@@ -1,5 +1,4 @@
__precompile__(true)

module Dolo

using Printf
@@ -111,6 +110,7 @@ import .splines
import .splines: eval_UC_spline, eval_UC_spline!, prefilter!

include("util.jl")
include("drstore.jl")

include("numeric/complementarities.jl")
include("numeric/newton.jl")
74 changes: 42 additions & 32 deletions src/algos/time_iteration_direct.jl
@@ -21,36 +21,42 @@ function time_iteration_direct(model, dprocess::AbstractDiscretizedProcess,
endo_nodes = nodes(ListOfPoints,grid)
N = length(endo_nodes)

d = length(endo_nodes[1])
# Discretized exogenous process
number_of_smooth_drs(dprocess) = max(n_nodes(dprocess), 1)
n_m = nsd = number_of_smooth_drs(dprocess)

p = SVector(model.calibration[:parameters]...)
n_x = length(model.calibration[:controls])

# initial guess for controls
x0 = [init_dr(i, endo_nodes) for i=1:nsd]

ti_trace = trace ? IterationTrace([x0]) : nothing

n_x = length(model.calibration[:controls])
# initial guess for controls
ds0 = DRStore(Point{n_x}, [N for i=1:n_m])
for i=1:n_m
ds0.data[i][:] = init_dr(i, endo_nodes)
end

complementarities = true
if complementarities == true
x_lb = [controls_lb(model, node(Point,dprocess,i),endo_nodes,p) for i=1:n_m]
x_ub = [controls_ub(model, node(Point,dprocess,i),endo_nodes,p) for i=1:n_m]
ds_lb = DRStore([controls_lb(model, node(Point,dprocess,i),endo_nodes,p) for i=1:n_m])
ds_ub = DRStore([controls_ub(model, node(Point,dprocess,i),endo_nodes,p) for i=1:n_m])
BIG = 100000
for i=1:n_m
for n=1:N
x_lb[i][n] = max.(x_lb[i][n],-BIG)
x_ub[i][n] = min.(x_ub[i][n], BIG)
end
for n=1:length(ds_lb.flat)
ds_lb.flat[n] = max.(ds_lb.flat[n],-BIG)
ds_ub.flat[n] = min.(ds_ub.flat[n], BIG)
end
clamp!(ds0, ds_lb, ds_ub)
end
ds1 = copy(ds0)


# create decision rule (which interpolates x0)
dr = CachedDecisionRule(dprocess, grid, x0)
dr = CachedDecisionRule(dprocess, grid, [ds0.data...])

# Define controls of tomorrow
x1 = deepcopy(x0)


# define states of today
s = deepcopy(endo_nodes);
@@ -68,45 +74,49 @@
############################### Iteration loop
n_h = length(model.symbols[:expectations])

# temporary vars
E_f = DRStore(Point{n_h}, [length(e) for e in ds0.data])
d_E_f = deepcopy(E_f.data[1])
S = deepcopy(s)
X = deepcopy(ds0.data[1])


while it<maxit && err>tol_η

it += 1

t1 = time_ns()

E_f = [zeros(Point{n_h},N) for i=1:number_of_smooth_drs(dprocess)]
set_values!(dr, [ds0.data...])
Member:

Is there any kind of penalty from the [ds0.data...] that appears throughout? (e.g. performance, memory, or type inference penalty)

This might be pushing things too far (and please tell me if it is), but would it make sense to have drs.data be NTuple{N,Vector{T}} so we know how many elements are in it and can use @generated functions to unroll these [drs.data...] calls?

Member Author:

For now I don't think we have any penalty. The elements of [ds0.data...] should be correctly inferred. set_values! itself could be improved to take a tuple as argument. I doubt unrolling would provide much benefit, as the amount of code within the loop is quite big.
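
As a side note, a minimal sketch of the NTuple variant floated above (hypothetical, not part of this PR; DRStoreN and components are illustrative names): carrying the number of components in the type makes the component count a compile-time constant, so length becomes static and the [ds.data...] splats could be avoided.

# Hypothetical alternative layout, assuming we want static component counts:
struct DRStoreN{T,N}
    data::NTuple{N,Vector{T}}
    flat::Vector{T}
end

# the component count is a type parameter, known to the compiler
Base.length(ds::DRStoreN{T,N}) where {T,N} = N

# splat-free conversion to a Vector of components
components(ds::DRStoreN) = collect(ds.data)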


set_values!(dr, x0)
# Compute expectations function E_f and states of tomorrow

# S = zeros(size(s))
E_f.flat[:] *= 0.0

for i in 1:size(E_f, 1)
m = node(Point,dprocess, i)

x0 = ds0.data[i]
x1 = ds1.data[i]
m = node(Point, dprocess, i)
for (w, M, j) in get_integration_nodes(Point,dprocess,i)
# Update the states
# S[:,:] = Dolo.transition(model, m, s, x0[i], M, p)
S = Dolo.transition(model, m, s, x0[i], M, p)
Dolo.transition(model, Val{(0,)}, m, s, x0, M, p, (S,))
# interpolate controls conditional on tomorrow's states
X = dr(i, j, S)
X[:] = dr(i, j, S)
# Compute expectations as a weighted average of the exo states w_j
E_f[i] += w*Dolo.expectation(model, M, S, X, p)

Dolo.expectation(model, Val{(0,)}, M, S, X, p, (d_E_f,))
d_E_f[:] *= w
E_f[i][:] += d_E_f
end
# compute controls of tomorrow
x1[i][:] = Dolo.direct_response(model, m, s, E_f[i], p)
# compute tomorrow's control
Dolo.direct_response(model, Val{(0,)}, m, s, E_f[i], p, (x1,) )
end

err = 0.0
for i in 1:size(x1, 1)
# apply bounds # TODO: improve
x1[i] = clamp.(x1[i], x_lb[i], x_ub[i])
# update error
err = max(err, maxabs(x1[i] - x0[i]))
# copy controls back into x0
copyto!(x0[i], x1[i])
if complementarities
clamp!(ds1, ds_lb, ds_ub)
end
err = distance(ds0,ds1)

copy!(ds0, ds1)

gain = err/err_0
err_0 = err
74 changes: 74 additions & 0 deletions src/drstore.jl
@@ -0,0 +1,74 @@
###
# special struct to hold decision rules
###

# import Main
struct DRStore{T}
data::Tuple{Vararg{Vector{T}}}
flat::Vector{T}
end

Main.length(ds::DRStore) = length(ds.data)
Main.size(ds::DRStore, x...) = size(ds.data, x...)
Main.getindex(ds::DRStore,x...) = getindex(ds.data,x...)
Main.setindex!(ds::DRStore,x...) = setindex!(ds.data,x...)
Main.abs(ds::DRStore{T}) where T = abs(ds.flat)

maxabs(ds::DRStore{T}) where T = maximum(e->norm(e,Inf), ds.flat)

norm(ds::DRStore{T}) where T = maximum(e->norm(e,Inf), ds.flat)
# Main.norm(ds::DRStore{T},k::Int) where T = norm( (norm(e,k) for e in ds.flat), k)
distance(ds1::DRStore{T}, ds2::DRStore{T}) where T = maximum( k->norm(k[1]-k[2],Inf), zip(ds1.flat,ds2.flat) )
# distance(ds1::DRStore{T}, ds2::DRStore{T}, k::Int) where T = norm( (norm(e[1]-e[2],k) for e in zip(ds1.flat,ds2.flat)), k )

total(ds::DRStore{T}) where T = sum(length(e) for e in ds.data)

function DRStore(::Type{T}, dims::AbstractVector{Int}) where T
L = sum(dims)
fD = zeros(T, L)
ptr = pointer(fD)
Member:

Yikes 😨! I don't think I've ever seen pointer and unsafe_wrap used out in the pure Julia wild (i.e. not interfacing with C libs).

Are we confident that this is safe and works correctly? I would guess that it does given your experiments, but it makes me a little scared.

What type of penalty would we get if we were to construct D using SubArrays (e.g. using view) instead of unsafe_wrap and pointer offsets?

Member Author (@albop, Dec 21, 2017):

I think the problem is not whether it is safe:

  • we control object construction, and the object is immutable, so the user cannot alter its behaviour. Of course this assumes we add consistency checks.
  • all arrays are linked to the same object, so one "view" cannot be destroyed without the others being destroyed. By construction, the memory we access should be freed for all arrays/subarrays at the same time.

One potential problem is actually performance. From my reptilian understanding of the discussion here (Get rid of reinterpret in its current form, JuliaLang/julia#22849), the compiler can optimize further if we guarantee non-aliasing. So we might slow things down a little bit with this approach (but not more than our current use of reinterpret).

In principle, SubArrays should work, and we should be able to pass them to all routines (including interpolation routines). Right now they don't:

  • our interpolation routines assume that inputs are contiguous arrays (i.e. take Array as arguments). We rely on this to reinterpret arrays in both directions.
  • we cannot just change all arguments to AbstractArray. That would be inappropriate, because it doesn't give us any guarantee that we can reinterpret them.
  • SubArrays have a flag which tells whether they are contiguous, but reinterpret doesn't work on them.

Intuitively, the condition for us to be able to reinterpret an array should not be that it is contiguous, but that all consecutive chunks are a multiple of the size of every datatype we wish to reinterpret it into. I think this is the direction followed in Julia 0.7, where reinterpret will return a special type of array. It would be natural for some SubArrays to be reinterpretable too, but I don't know whether that is the case yet.

Right now, I don't totally understand how the new reinterpret will work in 0.7, and it certainly won't be backward compatible. On the other hand, the current behaviour is arguably stupid but certainly forward compatible, unless they remove pointer, which I'm sure they won't. Maybe there is a way to actually tell the compiler we are not aliasing anything. Overall, there are fewer factors to take into account with this simple solution, so I'm quite inclined to stay with it for now.
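
For comparison, a minimal sketch of the SubArray route discussed above (hypothetical; drstore_views is an illustrative name, and it assumes the interpolation routines were relaxed to accept contiguous views):

# Hypothetical view-based construction, no pointer arithmetic involved:
function drstore_views(::Type{T}, dims::AbstractVector{Int}) where T
    fD = zeros(T, sum(dims))
    stops = cumsum(dims)
    starts = [1; stops[1:end-1] .+ 1]
    # each component is a SubArray aliasing a contiguous chunk of fD
    D = [view(fD, starts[i]:stops[i]) for i in 1:length(dims)]
    return D, fD
end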

sz = sizeof(T)
offsets = [[0]; cumsum(dims)[1:end-1]]*sz
D = [unsafe_wrap(Array{T}, ptr+offsets[i], dims[i]) for i=1:length(dims)]
return DRStore(tuple(D...), fD)
end
function DRStore(data::AbstractVector{S}) where S<:AbstractVector{T} where T
# data is always copied in this case
# as we cannot guarantee data is contiguous
dims = [length(d) for d in data]
ds = DRStore(T, dims)
for i=1:length(data)
ds.data[i][:] = data[i]
end
ds
end

# this one reuses data in place
# should it be called DRStore! ?
function DRStore(fD::Vector{T},dims::AbstractVector{Int}) where T
L = sum(dims)
ptr = pointer(fD)
sz = sizeof(T)
offsets = [[0]; cumsum(dims)[1:end-1]]*sz
D = [unsafe_wrap(Array{T}, ptr+offsets[i], dims[i]) for i=1:length(dims)]
Member:

I think we should probably be testing that fD is the correct length, right?

unsafe_wrap will let us have elements of D defined over memory that extends beyond fD:

julia> fD = rand(4)
4-element Array{Float64,1}:
 0.36243
 0.0790668
 0.232033
 0.975159


julia> X = unsafe_wrap(Array{Float64}, pointer(fD), 5)
5-element Array{Float64,1}:
 0.36243
 0.0790668
 0.232033
 0.975159
 9.88131e-324

julia> fD[:] = 1
1

julia> X
5-element Array{Float64,1}:
 1.0
 1.0
 1.0
 1.0
 9.88131e-324

(they did warn us it was unsafe)

Member Author:

I don't see anything wrong with the value 9.88131e-324. I quite like it actually.
(joke aside: sure we need to check dimensions).
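
For concreteness, a minimal sketch of the dimension check agreed on here (hypothetical; check_dims is an illustrative name, and the guard is not in the PR as posted):

# hypothetical guard, to run before any unsafe_wrap in DRStore(fD, dims)
function check_dims(fD::Vector, dims::AbstractVector{Int})
    L = sum(dims)
    length(fD) == L || throw(DimensionMismatch(
        "flat vector has length $(length(fD)); dims require $L"))
    return L
end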

return DRStore(tuple(D...), fD)
end

DRStore! = DRStore

function Main.copy(ds::DRStore{T}) where T
Member:

This should be Base.copy

Member Author:

Sure. I followed the error message without thinking...

dims = [length(e) for e in ds.data]
fD = deepcopy(ds.flat)
ds1 = DRStore!(fD,dims)
return ds1
end

Main.copy!(ds0::DRStore{T}, ds1::DRStore{T}) where T = copy!(ds0.flat, ds1.flat)
Member:

Base.copy!



function Main.clamp!(ds::DRStore{T}, ds_lb::DRStore{T}, ds_ub::DRStore{T}) where T
Member:

Base.clamp!

N = length(ds.flat)
for n=1:N
ds.flat[n] = clamp.(ds.flat[n], ds_lb.flat[n], ds_ub.flat[n])
end
end
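
To make the aliasing contract explicit before the tests, a short usage sketch (assuming the constructors above behave as intended):

ds = DRStore(Float64, [2, 3])  # flat vector of length 5, two wrapped components
ds.flat[1] = 1.5               # writing through flat ...
@assert ds.data[1][1] == 1.5   # ... is visible in the first component
ds.data[2][end] = 2.5          # writing a component ...
@assert ds.flat[end] == 2.5    # ... is visible in flat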
49 changes: 49 additions & 0 deletions test/test_drstore.jl
@@ -0,0 +1,49 @@

import Dolo: DRStore, distance, maxabs
using StaticArrays

@testset "DR Storage" begin

@testset "floats"

vals = rand(150)
dims = [50,60,40]
ddr = DRStore(vals, dims)

ddrc = copy(ddr)

ddr.flat[3] = 19.341
@test maximum(abs,ddr.flat-vals)==0.0
@test maximum(abs,ddr.flat-ddrc.flat)!=0.0
@test maximum(cat(1, ddr.data...) - ddr.flat)==0.0


dd = [rand(4) for i=1:3]
ds = DRStore(dd)

@test maximum(cat(1, ds.data...) - ds.flat) == 0.0


ds.flat[end] = 10
@test maximum(cat(1, ds.data...) - ds.flat) == 0.0

ds.data[2][:] *= 0.1
@test maximum(cat(1, ds.data...) - ds.flat) == 0.0

@test distance(ds,ds) == 0.0
# @time distance(ds,ds0)

end

begin "sarrays"

ddr1 = DRStore(SVector{2,Float64}, [20000,30000,40000])
ddr2 = DRStore(SVector{2,Float64}, [20000,30000,40000])

@time maxabs(ddr1)
@time distance(ddr1, ddr2)

# tbc...

end
end