# pygeodyn/compute_ER.py
# load required modules
import os
import time
import h5py
import logging
import numpy as np
from mpi4py import MPI
import geodyn_fortran as fortran
from pygeodyn.common import compute_legendre_polys
12"""
13Compute ER from U and SV. Writes the output file ".npy" file in the prior_dir.
14"""
# directory of the script (renamed from "dir" to avoid shadowing the builtin)
script_dir = os.path.dirname(os.path.abspath(__file__))
# computation parameters
Lu = 18
Lb = 13
Lsv = 13
#prior_dir = script_dir + "/data/priors/0path"    # coupled_earth
#prior_type = "0path"                             # coupled_earth
#prior_dir = script_dir + "/data/priors/50path"   # midpath
#prior_type = "50path"                            # midpath
prior_dir = script_dir + "/data/priors/71path"    # 71path
prior_type = "71path"                             # 71path
#prior_dir = script_dir + "/data/priors/100path"  # 100path
#prior_type = "100path"                           # 100path
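# The selected prior directory is expected to contain a "Real.hdf5" file
# providing the datasets "times", "U" and "MF" read by compute_ER below.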
def compute_ER(Lu, Lb, Lsv, prior_dir, prior_type):
    """
    Compute ER for all prior times and save it in prior_dir

    :param Lu: Maximal spherical harmonic degree of the core flow
    :type Lu: int
    :param Lb: Maximal spherical harmonic degree of the magnetic field
    :type Lb: int
    :param Lsv: Maximal spherical harmonic degree of the secular variation
    :type Lsv: int
    :param prior_dir: Directory containing the prior data
    :type prior_dir: str
    :param prior_type: Prior type
    :type prior_type: str
    """
    # limit the numerical libraries to a single thread each, so that the MPI
    # ranks do not oversubscribe the available cores when running in parallel
    os.environ["MKL_NUM_THREADS"] = "1"
    os.environ["NUMEXPR_NUM_THREADS"] = "1"
    os.environ["OMP_NUM_THREADS"] = "1"
    # Init MPI
    if not MPI.Is_initialized():
        MPI.Init()
    comm = MPI.COMM_WORLD
    nb_proc = comm.Get_size()
    rank = comm.Get_rank()

    def first_process():
        return rank == 0

    # Log
    logging_level = logging.INFO
    log_format = '%(asctime)s - Process {} - %(levelname)s - %(message)s'.format(rank)
    logging.basicConfig(format=log_format, level=logging_level)
    if first_process():
        begin_time = time.time()
    Nu = Lu*(Lu+2)
    Nu2 = 2*Nu
    Nsv = Lsv*(Lsv+2)
    Nb = Lb*(Lb+2)
    tmax = 64
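    # A real spherical harmonic expansion truncated at degree L carries
    # L*(L+2) coefficients (the sum of 2l+1 over l = 1..L); the flow has both
    # toroidal and poloidal parts, hence the Nu2 = 2*Nu flow unknowns. tmax
    # presumably sets the resolution of the Gauss grid built below.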
    # read prior T, U, MF, SV (can be adapted to the user's prior data)
    if prior_type in ("71path", "100path", "midpath", "S1dynamo"):
        with h5py.File(prior_dir + "/Real.hdf5", "r") as hf:
            T = np.array(hf["times"])
            U = np.array(hf["U"])
            MF = np.array(hf["MF"])
        SV = np.gradient(MF, T, axis=0)
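        # np.gradient handles the (possibly non-uniform) spacing given by T,
        # using second-order central differences in the interior of the time
        # series and one-sided differences at its two ends.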
    # select max degree
    tnm = U[:, :np.shape(U)[1] // 2]
    snm = U[:, np.shape(U)[1] // 2:]
    U = np.concatenate([tnm[:, :Nu], snm[:, :Nu]], axis=1)
    MF = MF[:, :Nb]
    SV = SV[:, :Nsv]
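    # Assuming the coefficients of each family are ordered by increasing
    # degree, keeping the first Nu (resp. Nb, Nsv) columns truncates the
    # expansions to the requested maximal degrees.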
    # number of times
    N_times = T.shape[0]
    # compute Legendre polynomials
    legendre_polys = compute_legendre_polys(tmax, Lb, Lu, Lsv)

    gauss_th = np.asfortranarray(legendre_polys['thetas'])
    gauss_weights = np.asfortranarray(legendre_polys['weights'])

    lpsv = np.asfortranarray(legendre_polys['SV'][0])
    dlpsv = np.asfortranarray(legendre_polys['SV'][1])
    d2lpsv = np.asfortranarray(legendre_polys['SV'][2])
    LLsv = d2lpsv.shape[0]

    lpu = np.asfortranarray(legendre_polys['U'][0])
    dlpu = np.asfortranarray(legendre_polys['U'][1])
    d2lpu = np.asfortranarray(legendre_polys['U'][2])
    LLu = d2lpu.shape[0]

    lpb = np.asfortranarray(legendre_polys['MF'][0])
    dlpb = np.asfortranarray(legendre_polys['MF'][1])
    d2lpb = np.asfortranarray(legendre_polys['MF'][2])
    LLb = d2lpb.shape[0]
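    # Each of legendre_polys['SV'/'U'/'MF'] is taken here to hold the
    # associated Legendre functions and their first and second theta
    # derivatives (indices 0, 1, 2) evaluated on the Gauss grid.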
    # Build the attribution of times if several processes run in parallel
    attributed_times = range(rank, N_times, nb_proc)
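    # round-robin distribution: rank r handles time indices r, r + nb_proc,
    # r + 2*nb_proc, ... so the workload is balanced across processes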
    ER = np.zeros((N_times, Nsv))
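    # Rows outside a rank's attributed times stay at zero, so the
    # Allreduce(SUM) below merges the disjoint contributions of all ranks.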
    for i in attributed_times:
        logging.info("Computing ER at time index " + str(i) + " out of " + str(N_times))
        # Function radmats computes Ar(b)^T for the radial induction equation
        # dBr/dt = Ar(b)*U
        ArbT = np.zeros((Nu2, Nsv), order='F')
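        # column-major (order='F') storage lets the compiled Fortran routine
        # fill ArbT in place without an extra copy at the f2py interface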
        fortran.integral.radmats(gauss_th, gauss_weights, lpsv, dlpsv, d2lpsv,
                                 lpu, dlpu, d2lpu, lpb, dlpb, d2lpb,
                                 ArbT, MF[i, :],
                                 Lsv, Lu, Lb,
                                 Nsv, Nu2 // 2, Nb,
                                 LLsv, LLu, LLb, tmax)
        Arb = np.transpose(ArbT)

        # SV from unresolved field --> unresolved SV = total SV - resolved SV
        ER[i, :] = SV[i, :] - np.dot(Arb, U[i, :])
    # Recover all errors by summing the arrays of all processes.
    # Note that Allreduce updates the array of each process with the result,
    # so there is no need to broadcast afterwards.
    logging.info("Waiting for all processes to finish computing ER")
    comm.Allreduce(MPI.IN_PLACE, ER, op=MPI.SUM)
    logging.info("Sync done!")
    if first_process():
        # Save ER in prior_dir
        np.save(prior_dir + "/ER_Lu_{}_Lb_{}_Lsv_{}.npy".format(Lu, Lb, Lsv), ER)

        end_time = time.time()
        elapsed_time = end_time - begin_time
        logging.info("Elapsed time : {:.2f} s".format(elapsed_time))
if __name__ == '__main__':
    compute_ER(Lu, Lb, Lsv, prior_dir, prior_type)