From d00b93920e6c7cdfe15904a2b866728fd64e2c71 Mon Sep 17 00:00:00 2001
From: Humphrey Yang
Date: Mon, 22 Jul 2024 10:20:25 +1000
Subject: [PATCH 1/2] fix a minor tag

---
 lectures/calvo_machine_learn.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lectures/calvo_machine_learn.md b/lectures/calvo_machine_learn.md
index ade356d8..7dd74a8b 100644
--- a/lectures/calvo_machine_learn.md
+++ b/lectures/calvo_machine_learn.md
@@ -414,7 +414,7 @@ First, because we'll want to compare the results we obtain here with those obtai
 We hide the cell that copies the class, but readers can find details of the class in this quantecon lecture {doc}`calvo`.
 
 ```{code-cell} ipython3
-:tags: [hide-output]
+:tags: [hide-input]
 
 class ChangLQ:
     """

From ebeea1ea8ff8912bf9811c15b4c736e03e3fc39a Mon Sep 17 00:00:00 2001
From: Humphrey Yang
Date: Mon, 22 Jul 2024 10:27:18 +1000
Subject: [PATCH 2/2] remove redundant parameters

---
 lectures/calvo_machine_learn.md | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/lectures/calvo_machine_learn.md b/lectures/calvo_machine_learn.md
index 7dd74a8b..fe73c60a 100644
--- a/lectures/calvo_machine_learn.md
+++ b/lectures/calvo_machine_learn.md
@@ -558,14 +558,20 @@ def compute_θ(μ, α=1):
     θ = jnp.append(θ, μbar)
 
     return θ
+
+@jit
+def compute_hs(u0, u1, u2, α):
+    h0 = u0
+    h1 = -u1 * α
+    h2 = -0.5 * u2 * α**2
+
+    return h0, h1, h2
 
 @jit
 def compute_V(μ, β, c, α=1, u0=1, u1=0.5, u2=3):
     θ = compute_θ(μ, α)
 
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     T = len(μ) - 1
     t = np.arange(T)
@@ -890,9 +896,7 @@ With the more structured approach, we can update our gradient descent exercise w
 
 def compute_J(μ, β, c, α=1, u0=1, u1=0.5, u2=3):
     T = len(μ) - 1
 
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     λ = α / (1 + α)
     _, B = construct_B(α, T+1)
@@ -944,9 +948,7 @@ We can also derive a closed-form solution for $\vec \mu$
 
 ```{code-cell} ipython3
 def compute_μ(β, c, T, α=1, u0=1, u1=0.5, u2=3):
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     _, B = construct_B(α, T+1)
@@ -981,9 +983,7 @@ We can check the gradient of the analytical solution against the `JAX` computed
 
 def compute_grad(μ, β, c, α=1, u0=1, u1=0.5, u2=3):
     T = len(μ) - 1
 
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     _, B = construct_B(α, T+1)
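
Review note: the sketch below is not part of the patch. It is a minimal check, assuming `jax` is installed, that the new `compute_hs` helper reproduces the `h0`, `h1`, `h2` values that `compute_V`, `compute_J`, `compute_μ`, and `compute_grad` previously computed inline; the default values mirror the lecture's `u0=1`, `u1=0.5`, `u2=3`, `α=1`, and everything else here is illustrative.

```python
# Minimal sketch (not part of the patch): verify that the jitted
# compute_hs helper matches the inline h0, h1, h2 expressions it
# replaces across the four call sites in the lecture.
import jax.numpy as jnp
from jax import jit

@jit
def compute_hs(u0, u1, u2, α):
    # Same body as the helper added by PATCH 2/2
    h0 = u0
    h1 = -u1 * α
    h2 = -0.5 * u2 * α**2

    return h0, h1, h2

# Lecture defaults: u0=1, u1=0.5, u2=3, α=1
u0, u1, u2, α = 1.0, 0.5, 3.0, 1.0
h0, h1, h2 = compute_hs(u0, u1, u2, α)

# Inline expressions exactly as they appeared before the refactor
assert jnp.allclose(h0, u0)
assert jnp.allclose(h1, -u1 * α)
assert jnp.allclose(h2, -0.5 * u2 * α**2)
print(h0, h1, h2)  # expected: 1.0 -0.5 -1.5
```

Hoisting the three coefficients into a single jitted helper keeps the four call sites in sync if the mapping from `(u0, u1, u2, α)` to `(h0, h1, h2)` ever changes, which seems to be the point of the "remove redundant parameters" commit.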