dapper.mods.Lorenz96.todter2015

Concerns figure 4 of bib.todter2015second.

 1"""Concerns figure 4 of `bib.todter2015second`."""
 2
 3import numpy as np
 4
 5import dapper.mods as modelling
 6import dapper.tools.randvars as RVs
 7from dapper.mods.Lorenz96 import step
 8from dapper.tools.localization import nd_Id_localization
 9
10t = modelling.Chronology(0.05, dko=2, T=4**5, BurnIn=20)
11
12Nx = 80
13Dyn = {
14    "M": Nx,
15    "model": step,
16    "noise": 0,
17}
18
19X0 = modelling.GaussRV(M=Nx, C=0.001)
20
21jj = np.arange(0, Nx, 2)
22Obs = modelling.partial_Id_Obs(Nx, jj)
23Obs["localizer"] = nd_Id_localization((Nx,), (1,), jj)
24# Obs['noise'] = RVs.LaplaceRV(C=1,M=len(jj))
25Obs["noise"] = RVs.LaplaceParallelRV(C=1, M=len(jj))
26
27HMM = modelling.HiddenMarkovModel(Dyn, Obs, t, X0)
28
29####################
30# Suggested tuning
31####################
32
33#                                                          rmse.a
34# xps += LETKF(N=20,rot=True,infl=1.04       ,loc_rad=5) # 0.44
35# xps += LETKF(N=40,rot=True,infl=1.04       ,loc_rad=5) # 0.44
36# xps += LETKF(N=80,rot=True,infl=1.04       ,loc_rad=5) # 0.43
37# These scores are quite variable:
38# xps += LNETF(N=40,rot=True,infl=1.10,Rs=2.5,loc_rad=5) # 0.57
39# xps += LNETF(N=80,rot=True,infl=1.10,Rs=1.6,loc_rad=5) # 0.45
40
41# In addition to standard post-analysis inflation,
42# we also find the necessity of tuning the inflation (Rs) for R,
43# which is not mentioned in the reference.
44
45# In summary, compared to the reference paper:
46# - Our rmse scores for the local ETKF are better
47# - Our rmse scores for the local NETF
48#   - with N=80 seems to be equal
49#   - with N=40 is worse
50#
51# These results (contradict and) pretty much void the merit of the NETF,
52# especially considering how the Laplace obs-error favours the NETF.
53# A possible explanation is that they use T=100 (unit-less),
54# which is way too short, and so they might have gotten "lucky".
55# Another explanation is that our implementation is buggy,
56# but this seems unlikely, especially since we reproduce the NETF
57# results from Wiljes'2017.
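
As a usage sketch (assuming DAPPER's standard experiment workflow, as used in the
package's example scripts; not part of this module), the suggested tuning above
could be run along the following lines:

    import dapper as dpr
    import dapper.da_methods as da
    from dapper.mods.Lorenz96.todter2015 import HMM

    xx, yy = HMM.simulate()  # sample a truth trajectory and Laplace-noise observations

    xps = dpr.xpList()
    xps += da.LETKF(N=20, rot=True, infl=1.04, loc_rad=5)
    xps += da.LNETF(N=40, rot=True, infl=1.10, Rs=2.5, loc_rad=5)

    for xp in xps:
        xp.assimilate(HMM, xx, yy)            # run the filter over the full chronology
        xp.stats.average_in_time()            # time-averaged statistics (incl. rmse.a)
        print(xp.avrgs.tabulate(["rmse.a"]))  # compare with the scores listed above

Note that the full T=4**5 chronology makes each run fairly long.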