
Commit

Merge branch 'dev_Christian' of https://github.com/BASEforHANK/BASEtoolbox.jl into dev_Christian
ChristianBayerEcon committed Mar 1, 2024
2 parents 4ec514a + c6631da commit 4196835
Showing 26 changed files with 69 additions and 55 deletions.
34 changes: 17 additions & 17 deletions Manifest.toml
@@ -2,7 +2,7 @@

julia_version = "1.10.0"
manifest_format = "2.0"
project_hash = "4804c52726eb75e7d6dd6c1ca48690d7a2b4671e"
project_hash = "14ba0ecd71491fff8f7a2d858f96a4140d142cf8"

[[deps.ANSIColoredPrinters]]
git-tree-sha1 = "574baf8110975760d391c710b6341da1afa48d8c"
@@ -223,9 +223,9 @@ version = "0.15.7"

[[deps.CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "c0ae2a86b162fb5d7acc65269b469ff5b8a73594"
git-tree-sha1 = "9b1ca1aa6ce3f71b3d1840c538a8210a043625eb"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.8.1"
version = "0.8.2"

[[deps.CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
@@ -816,9 +816,9 @@ version = "0.6.8"

[[deps.KrylovKit]]
deps = ["ChainRulesCore", "GPUArraysCore", "LinearAlgebra", "Printf"]
git-tree-sha1 = "1a5e1d9941c783b0119897d29f2eb665d876ecf3"
git-tree-sha1 = "5cebb47f472f086f7dd31fb8e738a8db728f1f84"
uuid = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
version = "0.6.0"
version = "0.6.1"

[[deps.LAME_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
@@ -965,9 +965,9 @@ uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"

[[deps.LinearMaps]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "9df2ab050ffefe870a09c7b6afdb0cde381703f2"
git-tree-sha1 = "9948d6f8208acfebc3e8cf4681362b2124339e7e"
uuid = "7a12625a-238d-50fd-b39a-03d52299707e"
version = "3.11.1"
version = "3.11.2"
weakdeps = ["ChainRulesCore", "SparseArrays", "Statistics"]

[deps.LinearMaps.extensions]
@@ -1008,9 +1008,9 @@ version = "1.0.3"

[[deps.MCMCChains]]
deps = ["AbstractMCMC", "AxisArrays", "Dates", "Distributions", "Formatting", "IteratorInterfaceExtensions", "KernelDensity", "LinearAlgebra", "MCMCDiagnosticTools", "MLJModelInterface", "NaturalSort", "OrderedCollections", "PrettyTables", "Random", "RecipesBase", "Statistics", "StatsBase", "StatsFuns", "TableTraits", "Tables"]
git-tree-sha1 = "3b1ae6bcb0a94ed7760e72cd3524794f613658d2"
git-tree-sha1 = "d0ce57aa5ebbdb456bac3bc5a2ca15cd06ec5f1b"
uuid = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
version = "6.0.4"
version = "6.0.5"

[[deps.MCMCDiagnosticTools]]
deps = ["AbstractFFTs", "DataAPI", "DataStructures", "Distributions", "LinearAlgebra", "MLJModelInterface", "Random", "SpecialFunctions", "Statistics", "StatsBase", "StatsFuns", "Tables"]
@@ -1217,9 +1217,9 @@ version = "0.5.5+0"

[[deps.Optim]]
deps = ["Compat", "FillArrays", "ForwardDiff", "LineSearches", "LinearAlgebra", "MathOptInterface", "NLSolversBase", "NaNMath", "Parameters", "PositiveFactorizations", "Printf", "SparseArrays", "StatsBase"]
git-tree-sha1 = "47fea72de134f75b105a5d4a1abe5c6aec89d390"
git-tree-sha1 = "d024bfb56144d947d4fafcd9cb5cafbe3410b133"
uuid = "429524aa-4258-5aef-a3af-852621145aeb"
version = "1.9.1"
version = "1.9.2"

[[deps.Opus_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
@@ -1291,9 +1291,9 @@ version = "1.4.0"

[[deps.Plots]]
deps = ["Base64", "Contour", "Dates", "Downloads", "FFMPEG", "FixedPointNumbers", "GR", "JLFzf", "JSON", "LaTeXStrings", "Latexify", "LinearAlgebra", "Measures", "NaNMath", "Pkg", "PlotThemes", "PlotUtils", "PrecompileTools", "Printf", "REPL", "Random", "RecipesBase", "RecipesPipeline", "Reexport", "RelocatableFolders", "Requires", "Scratch", "Showoff", "SparseArrays", "Statistics", "StatsBase", "UUIDs", "UnicodeFun", "UnitfulLatexify", "Unzip"]
git-tree-sha1 = "38a748946dca52a622e79eea6ed35c6737499109"
git-tree-sha1 = "c4fa93d7d66acad8f6f4ff439576da9d2e890ee0"
uuid = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
version = "1.40.0"
version = "1.40.1"

[deps.Plots.extensions]
FileIOExt = "FileIO"
@@ -1461,9 +1461,9 @@ version = "0.4.0+0"

[[deps.Roots]]
deps = ["Accessors", "ChainRulesCore", "CommonSolve", "Printf"]
git-tree-sha1 = "39ebae5b76c8cd5629bec21adfca78b437dac1e6"
git-tree-sha1 = "754acd3031a9f2eaf6632ba4850b1c01fe4460c1"
uuid = "f2b01f46-fcfa-551c-844a-d8ac1e96c665"
version = "2.1.1"
version = "2.1.2"

[deps.Roots.extensions]
RootsForwardDiffExt = "ForwardDiff"
@@ -1692,9 +1692,9 @@ deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[[deps.TranscodingStreams]]
git-tree-sha1 = "1fbeaaca45801b4ba17c251dd8603ef24801dd84"
git-tree-sha1 = "54194d92959d8ebaa8e26227dbe3cdefcdcd594f"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.10.2"
version = "0.10.3"
weakdeps = ["Random", "Test"]

[deps.TranscodingStreams.extensions]
1 change: 1 addition & 0 deletions Project.toml
@@ -11,6 +11,7 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341"
FieldMetadata = "bf96fef3-21d2-5d20-8afa-0e7d4c32a885"
FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549"
FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
Flatten = "4c728ea3-d9ee-5c9a-9642-b6f7d7dc04fa"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
GR_jll = "d2c73de3-f751-5644-a686-071e5b155ba9"
2 changes: 1 addition & 1 deletion docs/build/.documenter-siteinfo.json
@@ -1 +1 @@
{"documenter":{"julia_version":"1.10.0","generation_timestamp":"2024-01-31T18:01:26","documenter_version":"1.2.1"}}
{"documenter":{"julia_version":"1.10.0","generation_timestamp":"2024-02-06T15:26:23","documenter_version":"1.2.1"}}
2 changes: 1 addition & 1 deletion docs/build/estimation.html → docs/build/Estimation.html

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/build/Parsing.html

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions docs/build/PerturbationSolution.html

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/build/PostEstimation.html

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions docs/build/SteadyState.html

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions docs/build/Tools.html

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/build/index.html

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/build/search_index.js

Large diffs are not rendered by default.

10 changes: 0 additions & 10 deletions docs/build/steadystate.html

This file was deleted.

2 changes: 1 addition & 1 deletion docs/src/estimation.md → docs/src/Estimation.md
@@ -63,7 +63,7 @@ BASEforHANK.Estimation.measurement_error
```
## Bayesian estimation
```@docs
montecarlo
sample_posterior
```
We use a Markov Chain Monte Carlo method, specifically the Random-Walk Metropolis-Hastings ([`BASEforHANK.Estimation.rwmh()`](@ref)) algorithm, to sample from the posterior probability distribution of the parameter vector. The acceptance rate of the algorithm can be adjusted by setting `EstimationSettings.mhscale`. To obtain the posterior likelihood of each draw, we call [`BASEforHANK.Estimation.likeli()`](@ref), which evaluates the priors at `par` ([`BASEforHANK.Estimation.prioreval()`](@ref)) and returns the log-posterior as the sum of the log-prior and the log-likelihood.
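For intuition, a minimal self-contained sketch of a Random-Walk Metropolis-Hastings sampler of this type (the toy log-posterior and the tuning constant named `mhscale` below are assumptions for illustration, not the toolbox's `rwmh()` implementation):

```julia
# Minimal Random-Walk Metropolis-Hastings sketch (illustrative, not toolbox code).
using LinearAlgebra, Random

# Toy log-posterior: standard normal in two dimensions (stands in for log-prior + log-likelihood).
logpost(θ) = -0.5 * dot(θ, θ)

function rwmh_sketch(logpost, θ0; ndraws = 5_000, mhscale = 0.4, Σ = I(length(θ0)))
    θ      = copy(θ0)
    lp     = logpost(θ)
    draws  = zeros(ndraws, length(θ0))
    accept = 0
    C = cholesky(Symmetric(Matrix(mhscale^2 * Σ))).L   # proposal covariance scaled by mhscale
    for i in 1:ndraws
        θprop  = θ + C * randn(length(θ))              # random-walk proposal
        lpprop = logpost(θprop)
        if log(rand()) < lpprop - lp                   # accept with probability min(1, ratio)
            θ, lp = θprop, lpprop
            accept += 1
        end
        draws[i, :] = θ
    end
    return draws, accept / ndraws
end

draws, rate = rwmh_sketch(logpost, zeros(2))
println("acceptance rate ≈ ", round(rate, digits = 2))   # tune mhscale to move this rate
```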

2 changes: 1 addition & 1 deletion docs/src/PerturbationSolution.md
@@ -11,7 +11,7 @@ in two successive periods. Applying the total differential yields
to ``X'``,``X``. In the standard setting, we use the generalized Schur decomposition [^Klein]
to transform this equation into a linearized observation equation ``d = gx*k`` and
a linearized state transition equation ``k' = hx*k``, where ``k`` is a vector of the
*state* variables and ``d`` is a vector of the *control* variables (``X = \begin{bmatrix} k \\ d \end{bmatrix}``).
*state* variables and ``d`` is a vector of the *control* variables, ``X = \begin{bmatrix} k & d \end{bmatrix}'``.

In our code, ``F`` is implemented as [`BASEforHANK.PerturbationSolution.Fsys()`](@ref), while differentiating and
solving for ``gx`` and ``hx`` is done in [`BASEforHANK.PerturbationSolution.LinearSolution()`](@ref), which is called by [`linearize_full_model()`](@ref); the latter returns the results as a `struct` `LinearResults`:
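For intuition, a minimal sketch of how such a linear state-space solution can be iterated once ``gx`` and ``hx`` are in hand (the matrices below are made up; this is not toolbox code):

```julia
# Iterate the linearized system k' = hx * k, d = gx * k for a few periods (toy matrices).
function simulate_linear(hx, gx, k0, T)
    k = copy(k0)
    for t in 1:T
        d = gx * k        # controls implied by the current states
        println("t = $t, k = $(round.(k, digits = 3)), d = $(round.(d, digits = 3))")
        k = hx * k        # states next period
    end
    return k
end

hx = [0.9 0.1; 0.0 0.8]   # state transition (assumed stable)
gx = [1.0 0.5]            # observation equation mapping states into controls
simulate_linear(hx, gx, [1.0, 0.0], 5)   # start from a one-unit deviation in the first state
```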
2 changes: 1 addition & 1 deletion docs/src/steadystate.md → docs/src/SteadyState.md
@@ -5,7 +5,7 @@

The model features uninsured income shocks ``y`` (by assumption, all workers supply the same
efficiency units of labor [^BBL], so idiosyncratic productivity shocks translate
to income shocks) and two assets, bonds ``m`` and illiquid capital ``k``. Entrepreneurs
to income shocks) and two assets, liquid assets (bonds) ``m`` and illiquid assets (capital) ``k``. Entrepreneurs
(last income-state) receive no labor income, but firm profits, while workers additionally
receive labor union profits.

10 changes: 5 additions & 5 deletions docs/src/index.md
@@ -45,7 +45,7 @@ The file `Parameters.jl` contains three structures to provide model parameters,

The model parameters for the steady state have to be calibrated. We set them in the `struct` [`ModelParameters`](@ref). It also contains all other parameters that are estimated, including the stochastic process-parameters for the aggregate shocks. Each model parameter has a line of code. It starts with the parameter name as it is used in the code and a default value. The next two entries are its ascii name and its name for LaTeX output. The fourth entry is the prior if the parameter is to be estimated. Please see the [Distributions.jl](https://github.com/JuliaStats/Distributions.jl)-package for available options. The fifth entry is a Boolean whether the parameter should be estimated (`true`) or not (`false`).
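Purely for illustration, the five pieces of information carried by such a line can be thought of as follows (the names, values, and the `NamedTuple` layout below are made up; the actual `struct` uses its own field-metadata syntax):

```julia
using Distributions

# Hypothetical illustration of one parameter entry; the real ModelParameters struct
# encodes the same five pieces of information field by field.
example_entry = (
    value    = 0.98,            # calibrated/default value used in the code
    ascii    = "beta",          # ascii name
    latex    = raw"\beta",      # name for LaTeX output
    prior    = Beta(95.0, 5.0), # prior from Distributions.jl, used if estimated
    estimate = false,           # true: estimate this parameter, false: keep it fixed
)
```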

The folder `Model` also contains the mapping of prices to household incomes (given the idiosyncratic state space). This can be found in the subfolder `IncomesETC`. Depending on the adjustments to the macroeconomic model, the user needs to adjust this mapping. Similarly, the subfolder contains definitions of utility functions, profit functions, employment demand etc. that are used in the calculation of the steady state equilibrium.
The folder `Model` also contains the mapping of prices to household incomes (given the idiosyncratic state space). This can be found in the subfolder `IncomesETC`. Depending on the adjustments to the macroeconomic model, the user needs to adjust this mapping from prices to incomes. Similarly, the subfolder contains definitions of utility functions, profit functions, employment demand, etc. that are used in the calculation of the steady state equilibrium.
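As a purely illustrative sketch of the kind of primitives collected there (the function names, functional forms, and the tax parameter below are assumptions, not the definitions in `IncomesETC`):

```julia
# Toy versions of the kind of primitives collected in Model/IncomesETC (illustrative only).
util(c; ξ = 2.0)  = ξ == 1.0 ? log(c) : (c^(1 - ξ) - 1) / (1 - ξ)   # CRRA felicity
mutil(c; ξ = 2.0) = c^(-ξ)                                          # marginal utility

# Net labor income given wage w, hours h, idiosyncratic productivity y, and a stand-in tax rate τ.
labor_income(w, h, y; τ = 0.2) = (1 - τ) * w * h * y

println(util(1.5), "  ", mutil(1.5), "  ", labor_income(1.0, 1/3, 1.2))
```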


### Steady state and first dimensionality reduction
@@ -75,14 +75,14 @@ lr_full = linearize_full_model(sr_full, m_par)
```
computes the linear dynamics of the "full" model, i.e., using the first-stage model reduction, around the steady state (in the background, this calls [`BASEforHANK.PerturbationSolution.LinearSolution()`](@ref)) and saves a state-space representation in the instance `lr_full` of the `struct` `LinearResults` (see [`linearize_full_model()`](@ref)).

Linearization of the full model takes a few seconds. The resulting state space is, because the copula and the value functions are treated fully flexible in this first step, relatively large. As a result, also computing the first-order dynamics of this model takes a few seconds as well.
Linearization of the full model takes a few seconds. The resulting state space is relatively large, because the copula and the value functions are treated fully flexibly in this first step. As a result, computing the first-order dynamics of this model also takes a few seconds.

### Model reduction
This large state-space representation can, however, be reduced substantially using an approximate factor representation. For this purpose, run
```
sr_reduc = model_reduction(sr_full, lr_full, m_par)
```
which calculates the unconditional covariance matrix of all state and control variables and rewrites the coefficients of the value functions and the copula as linear combinations of some underlying factors. Only those factors that have eigenvalues above the precision predefined in `sr_full.n_par.compress_critC` and `sr_full.n_par.compress_critS` are retained.
which calculates the unconditional covariance matrix of all state and control variables and rewrites the coefficients of the value functions and the copula as linear combinations of some underlying factors. Only those factors that have eigenvalues above the precision predefined in `sr_full.n_par.compress_critC` (controls, i.e., marginal value functions) and `sr_full.n_par.compress_critS` (states, i.e., the copula) are retained.
!!! warning
After model reduction, `sr_reduc.indexes_r` contains the indexes that map correctly into the states/controls used in `LOMstate` and `State2Control`.
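For intuition, a minimal sketch of this kind of eigenvalue-based truncation (the covariance matrix and the threshold below are stand-ins, not the toolbox's internals):

```julia
using LinearAlgebra

# Toy covariance of some coefficients; in the toolbox this would be the unconditional
# covariance of the value-function / copula coefficients.
Σ = [1.0  0.8  0.1;
     0.8  1.0  0.1;
     0.1  0.1  0.01]

compress_crit = 1e-1                      # stand-in for compress_critC / compress_critS
F = eigen(Symmetric(Σ))                   # eigenvalues in ascending order
keep = F.values .> compress_crit          # retain only the "large" factors
factors = F.vectors[:, keep]              # loadings of the retained factors

println("kept ", count(keep), " of ", length(F.values), " factors")
```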

Expand All @@ -109,9 +109,9 @@ computes the mode of the likelihood, i.e., the parameter vector that maximizes t

Lastly,
```
montecarlo(sr_reduc, lr_reduc, er_mode, m_par)
sample_posterior(sr_reduc, lr_reduc, er_mode, m_par)
```
uses a Monte Carlo Markov Chain method to trace out the posterior probabilites of the estimated parameters.
uses a Markov Chain Monte Carlo method to trace out the posterior probabilities of the estimated parameters.
The final estimates (and further results) are saved in a file with the name given by the field `save_posterior_file`
in the `struct` `EstimationSettings` (instantiated in `e_set`).

Expand Down
6 changes: 3 additions & 3 deletions src/BASEforHANK.jl
@@ -60,7 +60,7 @@ export compute_steadystate,
model_reduction,
update_model,
find_mode,
montecarlo,
sample_posterior,
compute_irfs_vardecomp,
plot_irfs,
compute_hist_decomp,
@@ -265,7 +265,7 @@


@doc raw"""
montecarlo(mr,er;file=e_set.save_posterior_file)
mcmc_estimation(mr,er;file=e_set.save_posterior_file)
Sample posterior of parameter vector with [`rwmh()`](@ref), take sample mean as
parameter estimate, and save all results in `file`.
@@ -275,7 +275,7 @@ parameter estimate, and save all results in `file`.
- `mr::LinearResults`
- `er::EstimResults`
"""
function montecarlo(
function sample_posterior(
sr::SteadyResults,
lr::LinearResults,
er::EstimResults,
Binary file modified src/Output/Saves/HANK_chain.jld2
Binary file not shown.
Binary file modified src/Output/Saves/HANK_mode.jld2
Binary file not shown.
3 changes: 2 additions & 1 deletion src/SubModules/Estimation.jl
@@ -25,7 +25,8 @@ using LinearAlgebra,
DataFrames,
CSV,
FieldMetadata,
Parameters
Parameters,
FiniteDiff

using Parameters: @unpack
using MatrixEquations: lyapd
3 changes: 1 addition & 2 deletions src/SubModules/Estimation/mode_finding.jl
@@ -212,8 +212,7 @@ function mode_finding(sr, lr, m_par, e_set, par_start)
if sr.n_par.verbose
println("Computing Hessian. This might take a while...")
end
func = TwiceDifferentiable(pp -> LL_final(pp), par_final)
hessian_final = Optim.hessian!(func, par_final)
hessian_final = FiniteDiff.finite_difference_hessian(LL, par_final, relstep = 0.001)
else
if sr.n_par.verbose
println("Assuming Hessian is I...")
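For reference, a minimal standalone sketch of the `FiniteDiff.finite_difference_hessian` call introduced above, applied to a toy quadratic objective (the objective and evaluation point are made up; `relstep` mirrors the value in the diff):

```julia
using FiniteDiff

# Toy objective with known Hessian diag(2, 6), standing in for the negative log-posterior.
LL_toy(p) = p[1]^2 + 3.0 * p[2]^2

par_final = [0.5, -0.2]
H = FiniteDiff.finite_difference_hessian(LL_toy, par_final, relstep = 0.001)
println(round.(H, digits = 3))   # ≈ [2.0 0.0; 0.0 6.0]
```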
8 changes: 6 additions & 2 deletions src/SubModules/PerturbationSolution/SolveDiffEq.jl
@@ -187,9 +187,13 @@ function SolveDiffEq(
if n_par.sol_algo == :schur || lit_fail # schur decomposition
alarm_LinearSolution = false
Schur_decomp, slt, nk, λ = try
real_schur(A, -B) # first output is generalized Schur factorization
real_schur(A, -B) # first output is generalized Schur factorization
catch # in rare cases the schur decomposition fails numerically => treat as no solution
(0, 0, 0, 0)
try
complex_schur(A, -B)
catch
(0, 0, 0, 0)
end
end
# Check for determinacy and existence of solution
if n_par.nstates_r != nk
2 changes: 1 addition & 1 deletion src/SubModules/Tools.jl
@@ -61,5 +61,5 @@ include("Tools/Schur_and_DualUtils.jl")
include("Tools/Pdf2cdf.jl")
include("Tools/Broyden.jl")
include("Tools/CentralDerivatives.jl")
include("Tools/distrSummaries.jl")
include("Tools/DistrSummaries.jl")
end # module Tools
File renamed without changes.
9 changes: 9 additions & 0 deletions src/SubModules/Tools/Schur_and_DualUtils.jl
@@ -11,6 +11,15 @@ function real_schur(A, B)
return F, select_ev, nk, λ
end

function complex_schur(A, B)
F = LinearAlgebra.schur(complex(A), complex(B))
α::Vector{complex(promote_type(eltype(A), eltype(B)))} = F.alpha
λ = abs.(α) ./ abs.(F.beta)
select_ev = λ .>= 1.0
# select_ev = abs.(λ) .>= 1.0
nk = sum(select_ev) # Number of state Variables based on Eigenvalues
return F, select_ev, nk, λ
end

function tot_dual(x::ForwardDiff.Dual)
a = sum(ForwardDiff.partials(x, :))
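A quick illustrative use of the new fallback on a small made-up matrix pencil (with a stand-alone copy of the function so the snippet runs on its own):

```julia
using LinearAlgebra

# Stand-alone copy of the fallback shown in the diff above, plus a toy call.
function complex_schur(A, B)
    F = LinearAlgebra.schur(complex(A), complex(B))   # generalized Schur of the pencil (A, B)
    λ = abs.(F.alpha) ./ abs.(F.beta)                 # moduli of the generalized eigenvalues
    select_ev = λ .>= 1.0                             # eigenvalues with modulus ≥ 1
    nk = sum(select_ev)                               # number of states implied by the eigenvalues
    return F, select_ev, nk, λ
end

A = [2.0 0.0; 0.0 1.0]
B = [1.0 0.0; 0.0 2.0]
F, select_ev, nk, λ = complex_schur(A, B)
println("λ = ", round.(λ, digits = 2), ", nk = ", nk)   # generalized eigenvalues 2.0 and 0.5 ⇒ nk = 1
```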
2 changes: 1 addition & 1 deletion src/script.jl
@@ -125,7 +125,7 @@ if e_set.estimate_model == true
accept_rate,
par_final,
hessian_sym,
smoother_output = montecarlo(sr_mode, lr_mode, er_mode, m_par_mode)
smoother_output = sample_posterior(sr_mode, lr_mode, er_mode, m_par_mode)

# Only relevant output for later plotting will be saved.
# If you want all smoother output including the variance estimates