# Coverage for webgeodyn/inout/s1fs.py: 0% (86 statements)
# Extracted from a coverage.py v7.2.7 report, created at 2023-12-18 09:33 +0000
1import numpy as np
2from webgeodyn.processing import spectra
3from webgeodyn.filters import time
4from webgeodyn.constants import rE, rC
5from webgeodyn.models import Models, Model
def load(dataDirectory, dataModel, keepRealisations=False):
    """ Loading function for s1fs data. Also adds the data to the dataModel.

    :param dataDirectory: path of the gnm data file; the companion time and
        unm file names are derived from it by fixed-position string slicing
    :type dataDirectory: str (path)
    :param dataModel: Model to which the data should be added
    :type dataModel: Model
    :param keepRealisations: indicating if realisations should be kept or averaged (not used)
    :type keepRealisations: bool (default: False)
    :return: 0 if everything went well, -1 otherwise
    :rtype: int
    """
    # Load times.
    # NOTE(review): the companion file names are built by slicing the input
    # path at hard-coded character positions (0:15 and 18:), so this only
    # works for one specific directory/file-name layout — TODO make robust
    # (e.g. pass a directory and use os.path.join).
    datafile_time = dataDirectory[0:15] + "time.S1fs_pm0.8"
    times = np.loadtxt(datafile_time, skiprows=1)

    # Reading MF @ CMB in NS units
    gnmC = np.loadtxt(dataDirectory, skiprows=1)
    print("!! unscaled data !!")

    # Detect lmax from the column count n = lmax*(lmax+2).
    lmax = (-2 + np.sqrt(4 + 4 * gnmC.shape[1])) / 2
    if int(lmax) != lmax:
        raise ValueError("Data length %i does not correspond to lmax*(lmax+2)" % gnmC.shape[1])
    lmax = int(lmax)

    print('lmax NS:', lmax)
    print("from core surface to Earth's surface... ")

    # Upward-continuation factor (rC/rE)**(2l+4) for each degree l = 1..lmax
    # (continuation of the potential field from core surface to Earth's surface).
    degrees = np.arange(1, lmax + 1)
    fac_c2a = (rC / rE) ** (2 * degrees + 4)

    # gnm: from core surface to Earth's surface.
    # Each degree l owns 2l+1 coefficient columns; repeat the per-degree
    # factor accordingly so every column gets its own scaling.
    per_coeff = np.repeat(np.sqrt(fac_c2a), 2 * degrees + 1)
    gnmE = gnmC * per_coeff[np.newaxis, :]

    dataModel.addMeasure("MF", "MF", lmax, "nT", gnmE, times=times)

    # Reading U @ CMB in NS units: toroidal + poloidal coefficients,
    # hence 2*lmax*(lmax+2) columns.
    datafile_unm = dataDirectory[0:15] + "unm" + dataDirectory[18::]
    data = np.loadtxt(datafile_unm, skiprows=1)
    lmax = (-2 + np.sqrt(4 + 4 * (data.shape[1]) / 2)) / 2
    if int(lmax) != lmax:
        raise ValueError("Data length %i does not correspond to 2*lmax*(lmax+2)" % data.shape[1])
    lmax = int(lmax)
    dataModel.addMeasure("U", "U", lmax, "km/yr", data, times=times)

    return 0
def num2dim_S1fs(dataModel_adim):
    """Rescale an adimensional S1fs model to physical units.

    Scaling steps:
      1) dimensionalise time following Lhuillier et al. (2011): the
         secular-variation timescale tau_SV(l) is fitted as tau_SV0/l over
         degrees l in [2, 13], and tau_SV0 is matched to 415 yr;
      2) dimensionalise the magnetic field by fitting the numerical MF
         spatial spectrum to a CHAOS-based reference spectrum;
      3) dimensionalise the flow using the time factor and rC/rC_adim.

    :param dataModel_adim: adimensional model providing 'MF' and 'U' measures
    :type dataModel_adim: Model
    :return: dimensional model and the time, field and length scaling factors
    :rtype: (Model, float, float, float)
    """
    print("scale num. data...")

    rC_adim = 1.53846154  # adimensional core radius used by the simulation

    ####################################################################
    # 1) time scaling
    # ... calculate MF power spectrum
    MFarray = dataModel_adim.measures['MF'].data.T
    lmax_adim = dataModel_adim.measures['MF'].lmax
    nt = dataModel_adim.measures['MF'].data.shape[0]
    S_MF = spectra.spatial_spectrum_mag(MFarray, nt, lmax_adim)
    Sm_MF = np.mean(S_MF, axis=1)  # time-averaged spectrum per degree
    t_MF = dataModel_adim.times
    dt_adim = dataModel_adim.times[1] - dataModel_adim.times[0]

    # ... calculate SV power spectrum
    # (presumably deriv_series drops 2 samples, hence nt - 2 — TODO confirm)
    [SVarray, t_SV] = time.deriv_series(MFarray, t_MF)
    S_SV = spectra.spatial_spectrum_mag(SVarray, nt - 2, lmax_adim)
    Sm_SV = np.mean(S_SV, axis=1)

    # ... secular-variation timescale per degree
    tau_SV = np.sqrt(Sm_MF / Sm_SV)

    # ... fit tau_SV as tau_SV0/l for l in [2-13]:
    # in log10, YY = -XX + ZZ (suppose YY = -XX + ZZ), so ZZ = XX + YY
    # and tau_SV0 = 10**mean(ZZ)
    LLin = np.arange(2, 14, dtype=int)
    XXin = np.log10(LLin)
    YYin = np.log10(tau_SV[LLin - 1])
    ZZin = XXin + YYin
    tau_SV0 = 10 ** (np.mean(ZZin))

    fac_T = 415 / tau_SV0  # 415 yrs from Lhuillier et al 2011
    print('T scaling factor', fac_T)

    dt_dim = dt_adim * fac_T
    t_MF_dim = t_MF * fac_T
    t_MF_dim = t_MF_dim - t_MF_dim[0]  # dimensional times start at 0

    print("... time scaled following Lhuillier et al (2011).")

    ####################################################################
    # 2) magnetic-field scaling
    # NOTE(review): hard-coded absolute path into one user's home directory;
    # this only works on that machine — TODO make it a parameter or package
    # resource.
    var_dim = np.loadtxt("/home/gilletn/recherche/inversion/geomag_mfsv/covariances/var_mfsv_ref_chaos3.txt")
    varMF_dim = var_dim[:, 0]
    lm_chaos = varMF_dim.shape[0]
    LL = np.arange(1, lm_chaos + 1, dtype=int)
    # variance -> Lowes-spectrum conversion factor (l+1)(2l+1)
    fac = (LL + 1) * (2 * LL + 1)
    specMF_dim = varMF_dim * fac

    # power ratio CHAOS/simulation, averaged over degrees 2-13
    # (assumes lmax_adim >= lm_chaos so Sm_MF[LL - 1] is valid — TODO confirm)
    LLin = np.arange(2, 14, dtype=int)
    ZZ = np.log10(specMF_dim[LL - 1]) - np.log10(Sm_MF[LL - 1])
    pow_ratio = 10 ** (np.mean(ZZ[LLin - 1]))

    fac_B = np.sqrt(pow_ratio)
    print('B scaling factor', fac_B)

    MFarray_dim = MFarray * fac_B  # in nT !!
    print("... mag field scaled by fitting num. MF spectra to CHAOS.")

    #########################################################
    # 3) flow scaling
    Uarray = dataModel_adim.measures['U'].data.T
    lu_adim = dataModel_adim.measures['U'].lmax

    fac_L = (rC / rC_adim)
    print('L scaling factor', fac_L)

    U_dim = Uarray * fac_L / fac_T  # in km/yr !!

    #########################################################
    dataModel_dim = Model()
    dataModel_dim.addMeasure(measureName='MF', measureType='MF', lmax=lmax_adim, units='nT', data=MFarray_dim.T,
                             rmsdata=None, times=t_MF_dim)
    # BUGFIX: register 'U' with the flow's own truncation degree (lu_adim);
    # the original used the MF degree lmax_adim, which is wrong whenever the
    # two truncations differ.
    dataModel_dim.addMeasure(measureName='U', measureType='U', lmax=lu_adim, units='km/yr', data=U_dim.T,
                             rmsdata=None, times=t_MF_dim)

    return dataModel_dim, fac_T, fac_B, fac_L