Coverage for webgeodyn/inout/s1fs2.py: 0%
88 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-12-18 09:33 +0000
« prev ^ index » next coverage.py v7.2.7, created at 2023-12-18 09:33 +0000
1import numpy as np
2from webgeodyn.processing import spectra
3from webgeodyn.filters import time
4from webgeodyn.constants import rE, rC
5from webgeodyn.models import Models, Model
def load(dataDirectory, dataModel, keepRealisations=False):
    """ Loading function for s1fs2 data. Also adds the data to the dataModel.

    :param dataDirectory: path of the gnm data file (e.g. ``<dir>/gnm_1.53846.S1fs2``).
        Despite the name, this is a file path: the companion ``time.S1fs2`` and
        ``unm...`` files are derived from it by fixed-position string slicing,
        which assumes a 10-character directory prefix and a 3-character
        ``gnm`` stem — TODO confirm with callers before changing.
    :type dataDirectory: str (path)
    :param dataModel: Model to which the data should be added
    :type dataModel: Model
    :param keepRealisations: indicating if realisations should be kept or averaged (not used)
    :type keepRealisations: bool (default: False)
    :return: 0 if everything went well, -1 otherwise
    :rtype: int
    """
    # Load times from the companion time file (first line is a header).
    datafile_time = dataDirectory[0:10] + "time.S1fs2"
    times = np.loadtxt(datafile_time, skiprows=1)

    # Reading MF @ CMB in numerical-simulation (dimensionless) units.
    gnmC = np.loadtxt(dataDirectory, skiprows=1)
    print("!! unscaled data !!")

    # Detect lmax from the column count: n_cols = lmax*(lmax+2).
    lmax = (-2 + np.sqrt(4 + 4 * gnmC.shape[1])) / 2
    if int(lmax) != lmax:
        raise ValueError("Data length %i does not correspond to lmax*(lmax+2)" % gnmC.shape[1])
    lmax = int(lmax)

    print('lmax NS:', lmax)
    print("from core surface to Earth's surface... ")

    # Upward-continuation power factor per degree l: (rC/rE)**(2l+4).
    degrees = np.arange(1, lmax + 1)
    fac_c2a = (rC / rE) ** (2 * degrees + 4)

    # Continue the coefficients from the core surface up to the Earth's
    # surface: each degree l contributes 2l+1 coefficients, all scaled by
    # sqrt(fac_c2a[l-1]) = (rC/rE)**(l+2).  A single vectorized multiply
    # replaces the original per-coefficient double loop (which recomputed
    # the sqrt on every iteration).
    scale = np.repeat(np.sqrt(fac_c2a), 2 * degrees + 1)
    gnmE = gnmC * scale

    dataModel.addMeasure("MF", "MF", lmax, "nT", gnmE, times=times)

    # Reading U @ CMB in NS units; the unm file name is derived from the
    # gnm file name by the same fixed-position slicing.
    datafile_unm = dataDirectory[0:10] + "unm" + dataDirectory[13::]
    data = np.loadtxt(datafile_unm, skiprows=1)
    # Flow has two families (toroidal/poloidal): n_cols = 2*lmax*(lmax+2).
    lmax = (-2 + np.sqrt(4 + 4 * (data.shape[1]) / 2)) / 2
    if int(lmax) != lmax:
        raise ValueError("Data length %i does not correspond to 2*lmax*(lmax+2)" % data.shape[1])
    lmax = int(lmax)
    dataModel.addMeasure("U", "U", lmax, "km/yr", data, times=times)

    return 0
def num2dim_S1fs2(dataModel_adim,
                  var_file="/home/gilletn/recherche/inversion/geomag_mfsv/covariances/var_mfsv_ref_chaos3.txt"):
    """Scale a dimensionless S1fs2 model to physical units.

    1) dimensionalize time following Lhuillier et al. (2011)
    2) dimensionalize the mag field by fitting the MF spectrum to that of CHAOS
    3) dimensionalize the flow by using t_dim and rC/rC_adim

    :param dataModel_adim: dimensionless model holding 'MF' and 'U' measures
    :type dataModel_adim: Model
    :param var_file: path of the CHAOS reference variance file (column 0 holds
        the MF variances per degree); the default keeps the previously
        hard-coded location for backward compatibility
    :type var_file: str (path)
    :return: dimensional model and the time, field and length scaling factors
    :rtype: (Model, float, float, float)
    """
    print("scale num. data...")
    # Dimensionless core radius used by the simulation.
    rC_adim = 1.53846154

    ####################################################################
    # 1) time scaling
    # ... calculate MF power spectrum
    MFarray = dataModel_adim.measures['MF'].data.T
    lmax_adim = dataModel_adim.measures['MF'].lmax
    nt = dataModel_adim.measures['MF'].data.shape[0]
    S_MF = spectra.spatial_spectrum_mag(MFarray, nt, lmax_adim)
    Sm_MF = np.mean(S_MF, axis=1)
    t_MF = dataModel_adim.times

    # ... calculate SV power spectrum (time derivative loses 2 samples)
    [SVarray, t_SV] = time.deriv_series(MFarray, t_MF)
    S_SV = spectra.spatial_spectrum_mag(SVarray, nt - 2, lmax_adim)
    Sm_SV = np.mean(S_SV, axis=1)

    # ... secular-variation time scale per degree
    tau_SV = np.sqrt(Sm_MF / Sm_SV)

    # ... fit tau_SV as 1/l for l in [2-13]: log-log fit with slope -1
    LLin = np.linspace(2, 13, 12, dtype=int)
    XXin = np.log10(LLin)
    YYin = np.log10(tau_SV[LLin - 1])
    ZZin = XXin + YYin  # suppose YY = -XX + ZZ
    tau_SV0 = 10 ** (np.mean(ZZin))

    fac_T = 415 / tau_SV0  # 415 yrs from Lhuillier et al 2011
    print('T scaling factor', fac_T)

    t_MF_dim = t_MF * fac_T
    t_MF_dim = t_MF_dim - t_MF_dim[0]
    print("... time scaled following Lhuillier et al (2011).")

    ####################################################################
    # 2) magnetic-field scaling against the CHAOS reference spectrum
    var_dim = np.loadtxt(var_file)
    varMF_dim = var_dim[:, 0]
    lm_chaos = varMF_dim.shape[0]
    LL = np.linspace(1, lm_chaos, lm_chaos, dtype=int)
    # Convert per-coefficient variances to a power spectrum per degree.
    specMF_dim = varMF_dim * (LL + 1) * (2 * LL + 1)

    # Mean log power ratio over degrees 2-13 fixes the field amplitude.
    ZZ = np.log10(specMF_dim[LL - 1]) - np.log10(Sm_MF[LL - 1])
    pow_ratio = 10 ** (np.mean(ZZ[LLin - 1]))
    fac_B = np.sqrt(pow_ratio)
    print('B scaling factor', fac_B)

    MFarray_dim = MFarray * fac_B  # in nT !!
    print("... mag field scaled by fitting num. MF spectra to CHAOS.")

    #########################################################
    # 3) flow scaling
    Uarray = dataModel_adim.measures['U'].data.T
    fac_L = (rC / rC_adim)
    print('L scaling factor', fac_L)
    U_dim = Uarray * fac_L / fac_T  # in km/yr !!

    #########################################################
    dataModel_dim = Model()
    dataModel_dim.addMeasure(measureName='MF', measureType='MF', lmax=lmax_adim, units='nT', data=MFarray_dim.T,
                             rmsdata=None, times=t_MF_dim)
    # NOTE(review): the original code passed lmax_adim (the MF truncation)
    # for 'U' too, not the U measure's own lmax — kept as-is for behavior
    # compatibility; confirm whether the two truncations can ever differ.
    dataModel_dim.addMeasure(measureName='U', measureType='U', lmax=lmax_adim, units='km/yr', data=U_dim.T,
                             rmsdata=None, times=t_MF_dim)

    return dataModel_dim, fac_T, fac_B, fac_L