Coverage for webgeodyn/models/model.py: 0%

293 statements  

coverage.py v7.2.7, created at 2023-12-18 09:33 +0000

import numpy as np
import os
import json
import importlib

from webgeodyn.data import GHData, TSData
from webgeodyn.data.normPnm import semiNormalisedSchmidt, noNorm
from webgeodyn.constants import rE, rC

ACCELERATION = False


class Model:
    """
    Class handling models that contain measure data
    """

    # CONSTRUCTING METHODS

    def __init__(self, dataDirectory=None, dataFormat='default', nth=90, nph=180, normalisation=semiNormalisedSchmidt,
                 keepRealisations=True, state_type='analysed', export=True):
        """
        :param dataDirectory: directory where the data is located
        :type dataDirectory: str (path)
        :param dataFormat: format to use for loading the data
        :type dataFormat: str
        :param nth: size of the theta grid
        :type nth: int
        :param nph: size of the phi grid
        :type nph: int
        :param normalisation: normalisation to use for Legendre polynomials
        :type normalisation: function
        :param keepRealisations: indicates if realisations should be kept or averaged
        :type keepRealisations: bool
        :param state_type: type of the state to load ('analysed', or 'analysis' for older pygeodyn files)
        :type state_type: str
        :param export: whether the model should be available for download (export=True) or not (export=False)
        :type export: bool
        """
        self.nth = nth
        self.nph = nph
        self.normalisation = normalisation
        self.setGridSize(nth, nph)
        self.times = None

        if normalisation is None:
            self.PnmNorm = noNorm
        else:
            self.PnmNorm = normalisation

        self.dataDirectory = dataDirectory
        # Put the dataFormat in lower case for consistency with later checks
        self.dataFormat = dataFormat.lower()
        self.color = "#000000"

        # Measures given as Gauss coefficients (spherical harmonics)
        self.measures = {}
        self.measures_rms = {}
        # Measures NOT given as Gauss coefficients
        self.notSH_measures = {}

        self.export = export
        if dataDirectory is not None:
            try:
                self.loadDataFromFile(keepRealisations, state_type=state_type)
            except KeyError as ke:
                # Old pygeodyn files use the keyword 'analysis' instead of 'analysed':
                # give it another try with 'analysis' instead of 'analysed', or vice versa.
                if state_type == 'analysis':
                    state_type = 'analysed'
                elif state_type == 'analysed':
                    state_type = 'analysis'
                else:
                    raise ke

                self.loadDataFromFile(keepRealisations, state_type=state_type)
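
    # Minimal usage sketch of the constructor (the directory "data/mymodel" is hypothetical and must
    # hold files in one of the webgeodyn.inout formats); shown here as a comment, not part of the class:
    #
    #   from webgeodyn.models.model import Model
    #   model = Model(dataDirectory="data/mymodel", dataFormat="default", keepRealisations=True)
    #   print(model.times.shape, list(model.measures))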

    def copy(self):
        """
        Returns a copy of the current model.

        :return: Copy of the model with the same measures, normalisation and grid sizes.
        :rtype: Model
        """
        returnedModel = Model(nth=self.nth, nph=self.nph, normalisation=self.normalisation)
        for measureName in self.measures:
            measure = self.measures[measureName]
            newcoefs = measure.data.copy()
            if self.hasRMSData(measureName):
                newcoefs_rms = self.measures_rms[measureName].data.copy()
            else:
                newcoefs_rms = None

            returnedModel.addMeasure(measureName, measure.measureType, measure.lmax, measure.units, newcoefs,
                                     newcoefs_rms, times=self.times)
        return returnedModel

    def hasRMSData(self, measureName):
        """
        Checks if the measure exists and if RMS data exists for it.

        :param measureName: name of the measure to probe
        :type measureName: str
        :return: a boolean indicating if RMS data exists for the measure
        :rtype: bool
        """
        return (measureName in self.measures_rms) and (self.measures_rms[measureName].data is not None)

    def toMeanRMS(self):
        """
        Converts a Model with realisations to a Model containing only the mean and RMS.

        :return: Model with the same measures but under the form mean/RMS
        :rtype: Model
        """
        returnedModel = Model(nth=self.nth, nph=self.nph, normalisation=self.normalisation)
        for measureName in self.measures:
            measure = self.measures[measureName]
            newcoefs = np.mean(measure.data, axis=2)
            newcoefs_rms = np.std(measure.data, axis=2)
            returnedModel.addMeasure(measureName, measure.measureType, measure.lmax, measure.units, newcoefs,
                                     newcoefs_rms, times=self.times)

        return returnedModel

    def clearCache(self):
        """
        Clears the cache of all measures
        """
        for measureName in self.measures:
            self.measures[measureName].clearCache()

    def setGridSize(self, nth, nph):
        """
        Sets the theta grid over ]0, pi[ (step pi/(nth+1)) and the phi grid over [0, 2pi[ (step 2*pi/nph). Called in __init__.

        :param nth: size of the theta grid
        :type nth: int
        :param nph: size of the phi grid
        :type nph: int
        """
        self.nth = nth
        self.nph = nph
        self.th_step = np.pi / (nth + 1)
        self.ph_step = 2 * np.pi / nph

        self.th = np.arange(np.pi / (nth + 1), np.pi, self.th_step)
        self.ph = np.arange(0, 2 * np.pi, self.ph_step)
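
    # Worked example of the grid definition above for the default sizes (standalone sketch, not part
    # of the class):
    #
    #   import numpy as np
    #   nth, nph = 90, 180
    #   th_step = np.pi / (nth + 1)        # ~0.0345 rad, ~1.98 degrees
    #   ph_step = 2 * np.pi / nph          # ~0.0349 rad, exactly 2 degrees
    #   th = np.arange(th_step, np.pi, th_step)      # colatitudes strictly inside ]0, pi[
    #   ph = np.arange(0, 2 * np.pi, ph_step)        # longitudes in [0, 2pi[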

    def addMeasure(self, measureName, measureType, lmax, units=None, data=None, rmsdata=None, times=None):
        """
        Adds a new measure to the model. Also computes secondary measures ('DIVHU' and 'LOD') for 'U'.

        :param measureName: Name of the measure
        :type measureName: str
        :param measureType: Type of the measure
        :type measureType: str
        :param lmax: Maximal degree
        :type lmax: int
        :param units: unit of the measure (default: None)
        :type units: str
        :param data: measure data (default: None; in this case, only the times array is set)
        :type data: np.array
        :param rmsdata: RMS data (optional, default: None)
        :type rmsdata: np.array
        :param times: times corresponding to the measure (default: None; in this case nothing is done)
        :type times: np.array
        """
        # Rename the measure
        if measureName == "EF":
            measureName = "EXT_DIPOLE"
            measureType = "EXT_DIPOLE"

        if times is not None:
            if self.times is not None:
                if not np.allclose(self.times, times):
                    raise ValueError("Time arrays must all be the same for a given model")
            self.times = times
        if measureType in ("U", "S", "DIVHU", "dU/dt"):
            if data is not None:
                self.measures[measureName] = TSData(
                    data,
                    int(lmax),
                    units,
                    measureType,
                    self.PnmNorm,
                    self.th,
                    self.ph
                )

                # Compute secondary measures only for U
                if measureType == "U":
                    # Compute DivH(U) if not already done and if DIVHU was not added
                    if "DIVHU" not in self.measures:
                        divh_data = self.computeDivH(self.measures[measureName])

                        if divh_data.shape != data.shape:
                            raise RuntimeError("Computation of DivHU went wrong for measure={}".format(measureName))

                        # Once the data is built, add the measure DIVHU to the model
                        self.measures["DIVHU"] = TSData(
                            divh_data,
                            int(lmax),
                            "yr-1",
                            "DIVHU",
                            self.PnmNorm,
                            self.th,
                            self.ph
                        )

                    if ACCELERATION:
                        acc_data = self.computeAcceleration()
                        self.measures["dU/dt"] = TSData(
                            acc_data,
                            int(lmax),
                            "km/yr2",
                            "dU/dt",
                            self.PnmNorm,
                            self.th,
                            self.ph
                        )

                    # Compute the LOD if not already computed or added
                    if "LOD" not in self.notSH_measures:
                        lod_data = self.computeLOD()

                        # Once the data is built, add the measure LOD to the model
                        self.notSH_measures["LOD"] = lod_data

            if rmsdata is not None:
                self.measures_rms[measureName] = TSData(
                    rmsdata,
                    int(lmax),
                    units,
                    measureType,
                    self.PnmNorm,
                    self.th,
                    self.ph,
                )
        else:
            if data is not None:
                self.measures[measureName] = GHData(
                    data,
                    int(lmax),
                    units,
                    measureType,
                    self.PnmNorm,
                    self.th,
                    self.ph
                )
            if rmsdata is not None:
                self.measures_rms[measureName] = GHData(
                    rmsdata,
                    int(lmax),
                    units,
                    measureType,
                    self.PnmNorm,
                    self.th,
                    self.ph
                )
        return
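
    # Hypothetical call sketch for addMeasure (measure names, lmax values and units are illustrative,
    # not taken from the package; data arrays have shape (ntimes, ncoefs[, nrealisations])):
    #
    #   model.addMeasure("MF", "MF", lmax=13, units="nT", data=coefs, times=times)
    #   # adding a core-flow measure also triggers the secondary DIVHU and LOD computations
    #   model.addMeasure("U", "U", lmax=18, units="km/yr", data=flow_coefs, times=times)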

    def loadDataFromFile(self, keepRealisations, state_type='analysed'):
        """
        Loads the data from files according to the dataDirectory and dataFormat of the model. Uses the dedicated load functions of the inout module.

        :param keepRealisations: indicates if realisations should be kept or averaged
        :type keepRealisations: bool
        """
        self.measures = {}
        self.measures_rms = {}

        # If no format is specified, read the "dataFormat" file in the data directory
        if self.dataFormat is None:
            if os.path.exists(os.path.join(self.dataDirectory, "dataFormat")):
                with open(os.path.join(self.dataDirectory, "dataFormat"), 'r') as f:
                    self.dataFormat = f.readline().rstrip()

        # If a data format was specified (or read from the dataFormat file), load via the matching module of inout
        if self.dataFormat is not None:
            print("Reading model using predefined format", self.dataFormat)
            load_fct = importlib.import_module("webgeodyn.inout." + self.dataFormat).load
            try:
                load_fct(self.dataDirectory, self, keepRealisations, state_type=state_type)
            except TypeError:  # state_type is a valid argument only for pygeodyn hdf5 simulations for the moment
                load_fct(self.dataDirectory, self, keepRealisations)

            print('Finished loading model data with format', self.dataFormat)
        # Else raise an error
        else:
            raise ValueError("dataFormat is not defined.")
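
    # Sketch of the contract a format module appears to satisfy, inferred from the call above (the
    # module name "myformat" and its internals are hypothetical): webgeodyn.inout.<format> exposes a
    # load function that registers measures on the model.
    #
    #   # webgeodyn/inout/myformat.py
    #   def load(dataDirectory, model, keepRealisations, state_type='analysed'):
    #       # read files from dataDirectory, then e.g.:
    #       # model.addMeasure(name, measureType, lmax, units, data, rmsdata, times)
    #       ...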

    def saveDataToFile(self, dataDirectory, forceOverwrite=False, dataformat="default"):
        """
        Saves the data of the model in a designated format. Uses the dedicated save functions of the inout module.

        :param dataDirectory: directory where to save the files
        :type dataDirectory: str (path)
        :param forceOverwrite: indicates if files of the same name should be overwritten
        :type forceOverwrite: bool
        :param dataformat: data format under which the data should be saved (default: "default")
        :type dataformat: str
        """
        save_fct = importlib.import_module("webgeodyn.inout." + dataformat).save
        save_fct(dataDirectory, self, forceOverwrite)

    # GETTERS CALLED BY AJAX REQUESTS

    def getSpherHarmData(self, measureName, l, mlist, removemean, computeallrealisation=False, irealisation=-1):
        """
        Extracts the spherical harmonic coefficients of a measure for the requested degree and orders.

        :param measureName: name of the measure
        :type measureName: str
        :param l: degree for which to extract coefficients
        :type l: int
        :param mlist: orders for which to extract coefficients
        :type mlist: int or list of ints
        :param removemean: indicates if the temporal mean should be removed
        :type removemean: bool
        :param computeallrealisation: not used
        :type computeallrealisation: bool
        :param irealisation: not used
        :type irealisation: int
        :return: JSON containing the times ('times') and the coefficients of the measure for the requested degree and orders ('data')
        :rtype: dict
        """
        jsondata = {"times": self.times.tolist(), "data": {}, "unit": self.measures[measureName].units}

        if mlist is None:
            mlist = np.arange(l + 1)  # Return all m for the given l
        if isinstance(mlist, int):
            mlist = [mlist]  # Return a single m

        print("ASKING FOR l=", l, "m=", mlist)

        for m in mlist:
            jsondata["data"][str(m)] = {}
            for coef in self.measures[measureName].coefs_list:
                k = self.measures[measureName].lm2k(int(l), int(m), coef)
                if k is None:
                    continue  # it has no k index (e.g. h10)
                jsondata["data"][str(m)][coef] = {}

                if self.measures[measureName].has_realisations:
                    if removemean:
                        jsondata["data"][str(m)][coef]["values"] = np.mean(
                            (self.measures[measureName].data[:, k] - self.measures[measureName].coefs_mean[k]),
                            axis=1).tolist()
                    else:
                        jsondata["data"][str(m)][coef]["values"] = np.mean(self.measures[measureName].data[:, k],
                                                                           axis=1).tolist()
                    jsondata["data"][str(m)][coef]["rms"] = np.std(self.measures[measureName].data[:, k],
                                                                   axis=1).tolist()
                else:
                    if removemean:
                        jsondata["data"][str(m)][coef]["values"] = (self.measures[measureName].data[:, k] - self.measures[measureName].coefs_mean[k]).tolist()
                    else:
                        jsondata["data"][str(m)][coef]["values"] = self.measures[measureName].data[:, k].tolist()

                    if self.hasRMSData(measureName):
                        # hstack on the rms returns an array [left, right, left, right, ...] if each rms point is [left, right]
                        jsondata["data"][str(m)][coef]["rms"] = np.hstack(
                            self.measures_rms[measureName].data[:, k]).tolist()

        return jsondata
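
    # Approximate shape of the dictionary returned above (values shortened; the coefficient names
    # 'g'/'h' and the unit are illustrative, they come from the measure's coefs_list and units;
    # 'rms' is only present when realisations or RMS data are available):
    #
    #   {
    #       "times": [1997.0, 1997.25, ...],
    #       "unit": "nT",
    #       "data": {"0": {"g": {"values": [...], "rms": [...]}},
    #                "1": {"g": {...}, "h": {...}}}
    #   }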

    def getTimeSerieData(self, measureName, vectorComponent, cutvariable, cutvalue, removemean, coordtype,
                         computeallrealisation=False, irealisation=-1):
        """
        Computes the time evolution of a component (radial or angular) of a measure.

        :param measureName: name of the measure
        :type measureName: str
        :param vectorComponent: vector component to compute
        :type vectorComponent: str
        :param cutvariable: variable along which the cut is made
        :type cutvariable: str ('phi' or 'theta')
        :param cutvalue: value of the cut angle
        :type cutvalue: float
        :param removemean: indicates if the temporal mean should be removed
        :type removemean: bool
        :param coordtype: type of coordinate requested (angle or curvilinear (s_north or s_south))
        :type coordtype: str
        :param computeallrealisation: boolean indicating if the computation should be done on all realisations or on the mean (default: False)
        :type computeallrealisation: bool
        :param irealisation: index of the realisation to use (default: -1, which takes the mean instead)
        :type irealisation: int
        :return: JSON containing the times ('times'), the angles used for the computation ('angles') and the time series of the measure ('data')
        :rtype: dict
        """

        jsondata = {"times": self.times.tolist(), "data": {}}

        def get_coords_list(input_th, coord_type):
            """ Get the data for computation and display from coord_type (angle or curvilinear) """
            if coord_type == "s_north":
                display_data = np.linspace(0, 1., len(input_th))[1:-1]
                angles_to_compute = np.arcsin(display_data)
            elif coord_type == "s_south":
                # Compute angles for s = ]-1,0[ to have the correct sampling...
                s = np.linspace(-1, 0., len(input_th))[1:-1]
                # ...but add np.pi to have the angle between ]pi/2,pi[ (South)...
                angles_to_compute = np.arcsin(s) + np.pi
                # Recompute y_data accordingly
                display_data = np.sin(angles_to_compute)
                # Flip both to have the same order as s_north
                angles_to_compute = np.flip(angles_to_compute)
                display_data = np.flip(display_data)
            else:
                display_data = (input_th * 180 / np.pi)
                angles_to_compute = input_th
            return angles_to_compute, display_data.tolist()

        # Separate treatment for the geostrophic component
        if vectorComponent == 'geos':
            th, jsondata['angles'] = get_coords_list(self.th, coordtype)
            data = self.measures[measureName].computeUGeostrophic(rC, th, computeallrealisation=computeallrealisation, irealisation=irealisation)

        else:
            if cutvalue is None:
                raise TypeError('{} is not a valid number'.format(cutvalue))
            cutvalue = cutvalue / 180 * np.pi
            if cutvariable == "phi":
                ph = [cutvalue]
                th, jsondata['angles'] = get_coords_list(self.th, coordtype)
            elif cutvariable == "theta":
                th = [cutvalue]
                ph = self.ph
                jsondata["angles"] = (ph * 180 / np.pi).tolist()
            else:
                print("ERROR, cutvariable should be theta or phi")
                return jsondata

            data = self.measures[measureName].computeRThetaPhiData(rC, th, ph, [vectorComponent],
                                                                   computeallrealisation=computeallrealisation,
                                                                   irealisation=irealisation)[vectorComponent]

            data = data.reshape((data.shape[0], data.shape[1] * data.shape[2]))

        # Common operations
        if removemean:
            data = data - np.mean(data, axis=0)

        jsondata["unit"] = self.measures[measureName].units
        jsondata["data"] = data.tolist()

        return jsondata
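
    # Worked example of the curvilinear mapping used above for coord_type="s_north" (standalone
    # sketch): the display coordinate is the cylindrical radius s = sin(theta), sampled uniformly
    # in ]0, 1[, and the colatitudes actually evaluated are theta = arcsin(s).
    #
    #   import numpy as np
    #   s = np.linspace(0, 1., 7)[1:-1]      # array([0.167, 0.333, 0.5, 0.667, 0.833])
    #   theta = np.arcsin(s)                 # colatitudes in ]0, pi/2[ (northern hemisphere)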

    def getObservatoryData(self, measureName, r, strth, strph):
        """
        Computes all components of a measure to compare it with observatory data.

        :param measureName: name of the measure
        :type measureName: str
        :param r: radius where to evaluate the measure components
        :type r: float
        :param strth: theta where to evaluate the measure components (as a string, in degrees)
        :type strth: str
        :param strph: phi where to evaluate the measure components (as a string, in degrees)
        :type strph: str
        :return: JSON containing the times ('times'), the parameters used for the computation ('r', 'th', 'ph'), the measure parameters ('unit', 'measuretype') and the computed components ('mean' and 'rms' if present)
        :rtype: dict
        """
        jsondata = {}

        # Or compute it from this data
        jsondata["mean"] = {"times": self.times.tolist()}
        if self.measures[measureName].has_realisations:
            jsondata["rms"] = {"times": self.times.tolist()}
            data = self.measures[measureName].computeRThetaPhiData(float(r), [float(strth) / 180 * np.pi],
                                                                   [float(strph) / 180 * np.pi], ["r", "th", "ph"],
                                                                   computeallrealisation=True)
        else:
            data = self.measures[measureName].computeRThetaPhiData(float(r), [float(strth) / 180 * np.pi],
                                                                   [float(strph) / 180 * np.pi], ["r", "th", "ph"])

        for component in data:
            if self.measures[measureName].has_realisations:
                jsondata["mean"][component] = np.mean(data[component], axis=3).flatten().tolist()
                jsondata["rms"][component] = np.std(data[component], axis=3).flatten().tolist()
            else:
                jsondata["mean"][component] = data[component].flatten().tolist()

        jsondata["r"] = float(r)
        jsondata["th"] = float(strth)
        jsondata["ph"] = float(strph)
        jsondata["unit"] = self.measures[measureName].units
        jsondata["measuretype"] = self.measures[measureName].measureType
        return jsondata

    def getDataInfo(self):
        """
        Gets info on the model (measures, times and color).

        :return: JSON containing the measures type and lmax ('measures'), the times ('times') and the color ('color') of the model.
        :rtype: dict
        """
        datainfo = {
            "measures":
                {measureName: {
                    "type": self.measures[measureName].measureType,
                    "lmax": self.measures[measureName].lmax,
                } for measureName in self.measures},
            "notSH_measures":
                {measureName: {
                    "type": measureName
                } for measureName in self.notSH_measures},
            "times": self.times.tolist(),
            "color": self.color,
        }

        return datainfo

    def getFileInfo(self):
        """
        Gets the info on the location and the format of the files used to load the model.

        :return: JSON containing the data directory ('directory'), the format of the files ('format') and if the measures have rms ('rms').
        :rtype: dict
        """
        fileinfo = {
            "directory": self.dataDirectory,
            "format": self.dataFormat,
            "rms": {measureName: self.hasRMSData(measureName) for measureName in self.measures}
        }

        return fileinfo

    def canGetGlobeData(self, measureName, vectorComponent, r=rC):
        """
        Checks whether the vectorComponent of a measure still has to be computed and whether it is already being computed.

        :param measureName: name of the measure
        :type measureName: str
        :param vectorComponent: component to compute
        :type vectorComponent: str
        :param r: radius where the component must be computed (default: constants.rC)
        :type r: float
        :return: the components to compute and the state of computation
        :rtype: tuple
        """
        if vectorComponent is None:
            # Test the whole vector
            oneComponentToCompute = False
            oneComponentComputing = False
            for component in self.measures[measureName].components:
                isComponentToCompute, isComponentComputing = self.measures[measureName].isAlreadyComputed(r, component)
                # If at least one isComponentToCompute is True, oneComponentToCompute is True
                oneComponentToCompute = isComponentToCompute or oneComponentToCompute
                # Idem for oneComponentComputing
                oneComponentComputing = isComponentComputing or oneComponentComputing
            return oneComponentToCompute, oneComponentComputing
        else:
            isComponentToCompute, isComponentComputing = self.measures[measureName].isAlreadyComputed(r,
                                                                                                      vectorComponent)
            return isComponentToCompute, isComponentComputing

    def getGlobeDataComputingState(self, measureName):
        """
        Gets the computing state of a measure.

        :param measureName: name of the measure to check
        :type measureName: str
        :return: computing state
        :rtype: float
        """
        return self.measures[measureName].computingState

    def getGlobeData(self, measureName, vectorComponent, itime, removemean, r=rC):
        """
        Computes a component of a measure on the whole globe.

        :param measureName: Name of the measure
        :type measureName: str
        :param vectorComponent: component to compute
        :type vectorComponent: str
        :param itime: index of the date at which the computation will be done
        :type itime: int
        :param removemean: indicates if the temporal mean should be removed
        :type removemean: bool
        :param r: radius where the component must be computed (default: constants.rC)
        :type r: float
        :return: JSON containing the parameters of the computation ('theta_step' in degrees, 'phi_step' in degrees, 'time'), the unit of the measure ('unit') and the data computed on the angular grid ('dataarray').
        :rtype: dict
        """
        jsondata = {"theta_step": self.th_step * 180 / np.pi,
                    "phi_step": self.ph_step * 180 / np.pi,
                    "time": self.times[itime],
                    "unit": self.measures[measureName].units}
        if vectorComponent is None:
            # Return the whole vector
            data = []
            for component in self.measures[measureName].components:
                print(component, "in", self.measures[measureName].components)
                componentData = self.measures[measureName].getRThetaPhiGridData(r, itime, removemean, component)
                if componentData is not None:
                    data.append(componentData)
            gridData = np.dstack(tuple(componentData for componentData in data)).tolist()
        else:
            gridData = self.measures[measureName].getRThetaPhiGridData(r, itime, removemean, vectorComponent).tolist()

        jsondata["dataarray"] = gridData
        return jsondata

    def getSpectraData(self, measureName, spectraType, spectraDate):
        """
        Returns a JSON with the Lowes spectrum of the asked type and measure.

        :param measureName: name of the measure used for the computation
        :type measureName: str
        :param spectraType: type of spectrum ("spectraofmean", "meanofspectra", "dated" or "deviation")
        :type spectraType: str
        :param spectraDate: date at which the dated spectrum should be evaluated
        :type spectraDate: float
        :return: JSON containing the degrees, the Lowes spectrum and the units of the measure
        :rtype: dict
        """
        measure = self.measures[measureName]

        if spectraType == "dated":
            date_index = np.where(self.times == spectraDate)
            # If no index was found, raise an error (np.where returns a tuple of arrays, so check its first element)
            if len(date_index[0]) == 0:
                raise KeyError('{} is not a valid date of model {}'.format(spectraDate, self.dataFormat))
            # Else compute for the first occurrence
            else:
                itime = date_index[0][0]
        else:
            itime = 0

        degrees, lowes_spectrum = measure.computeSpectra(spectraType, spectraDate, itime)

        jsondata = {'degrees': degrees.tolist(), 'data': lowes_spectrum.tolist(),
                    'unit': measure.units, 'has_reals': measure.has_realisations}

        return jsondata

    def getLodData(self, removemean):
        """
        Returns the LOD (length-of-day variation) as a JSON containing the times ('times'), the mean LOD ('data') and its RMS ('rmsdata').

        :param removemean: indicates if the temporal mean should be removed
        :type removemean: bool
        :rtype: dict
        """
        loddata = self.notSH_measures['LOD']

        if removemean:
            loddata = loddata - loddata.mean(axis=0)

        if loddata.ndim > 1:
            mean_lod = loddata.mean(axis=1)
            rms_lod = loddata.std(axis=1)
        else:
            mean_lod = loddata
            rms_lod = np.zeros_like(loddata)

        return {'times': self.times.tolist(), 'data': mean_lod.tolist(), 'rmsdata': rms_lod.tolist()}

    # COMPUTATION OF SECONDARY MEASURES

    def computeAcceleration(self):
        """
        Computes the acceleration dU/dt by finite differences. The 'U' measure must be loaded.

        :return: NumPy array containing the computed acceleration
        :rtype: np.array (same shape as the measure U data)
        """
        if "U" not in self.measures:
            raise ValueError("Cannot compute acceleration: no measure for core flow was found !")
        measure_U = self.measures["U"].data

        # Get unique times
        unique_times, unique_indexes = np.unique(self.times, return_index=True)
        dt = unique_times[1] - unique_times[0]

        acceleration = np.zeros_like(measure_U)

        # Compute the acceleration for each unique time...
        for i_unique in unique_indexes:
            if i_unique == 0:
                continue
            # dU/dt(t) = (U(t) - U(t-dt)) / dt
            acc = (measure_U[i_unique] - measure_U[i_unique-1])/dt
            current_t = self.times[i_unique]
            # ...but set it at every index corresponding to the current time
            current_indexes = np.where(self.times == current_t)[0]
            for i_t in current_indexes:
                acceleration[i_t] = acc

        return acceleration
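
    # Toy check of the backward difference used above (standalone sketch, independent of the class):
    #
    #   import numpy as np
    #   times = np.array([2000., 2001., 2002.])
    #   U = np.array([1.0, 3.0, 6.0])            # one coefficient, three epochs
    #   dt = times[1] - times[0]
    #   dUdt = np.zeros_like(U)
    #   dUdt[1:] = (U[1:] - U[:-1]) / dt         # [0., 2., 3.]; index 0 stays 0, as in computeAcceleration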

    def computeDivH(self, measure_U):
        """ Computes the coefficients of DivH from a flow measure.

        :param measure_U: Core flow measure
        :type measure_U: TSData
        :return: NumPy array containing the computed DivHU
        :rtype: np.array (same shape as the measure U data)
        """
        if not isinstance(measure_U, TSData):
            raise ValueError("Cannot compute DivH(U): the given core flow measure does not have the right type !")

        # DivH is computed only from the poloidal coeffs,
        # so build a bool array that is True only for poloidal coeffs
        is_s = np.logical_or(measure_U.is_sc, measure_U.is_ss)
        L = measure_U.l

        # If U has realisations, expand the dims to be able to broadcast
        if measure_U.has_realisations:
            is_s = measure_U.expand_dims(is_s)
            L = measure_U.expand_dims(L)

        Lplus1 = L + 1

        # The coeffs of DivH are the poloidal ones of U multiplied by -l(l+1)/rC
        # DivH has no toroidal coeffs so these are set to 0
        data_divh = np.where(is_s, -L * Lplus1 * measure_U.data / rC, 0)

        return data_divh
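
    # Worked example of the scaling applied above (standalone sketch): a degree-2 poloidal coefficient
    # s of the flow contributes -l(l+1) * s / rC to DivH(U), while toroidal coefficients contribute 0.
    #
    #   from webgeodyn.constants import rC
    #   l, s = 2, 5.0                      # degree and poloidal coefficient value (illustrative)
    #   divh_coef = -l * (l + 1) * s / rC  # = -30.0 / rC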

    def computeLOD(self):
        """ Computes the length-of-day variation (LOD) from the measure of U. The 'U' measure must be loaded.

        :return: NumPy array containing the computed LOD
        :rtype: np.array (times, 1) or (times, 1, coef)
        """
        if "U" not in self.measures:
            raise ValueError("Cannot compute LOD: no measure for core flow was found !")
        measure_U = self.measures["U"].data

        lmax = self.measures["U"].lmax

        # If there are realisations, put the coef axis at the end
        if self.measures["U"].has_realisations:
            measure_U = np.swapaxes(measure_U, 1, 2)

        # Compute the LOD with the available data (realisations included if present)
        LOD = measure_U[..., 0]
        if lmax > 1:
            LOD = LOD + 1.776 * measure_U[..., 8]
        if lmax > 2:
            LOD = LOD + 0.080 * measure_U[..., 24]
        if lmax > 3:
            LOD = LOD + 0.002 * measure_U[..., 48]
        LOD = LOD * 1.232

        return LOD
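
    # The combination above, written out (c is the flow coefficient array at one epoch; the indices
    # 0, 8, 24 and 48 presumably select the zonal toroidal coefficients of degrees 1, 3, 5 and 7,
    # but that depends on the internal TSData ordering):
    #
    #   LOD = 1.232 * (c[0] + 1.776 * c[8] + 0.080 * c[24] + 0.002 * c[48])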

    def computeLowes(self, gauss_coefs, measureName, squared=True):
        """
        Computes the Lowes spectrum from the Gauss coefficients given for the measure. Handles broadcasting as long as the last dimension is the number of Gauss coefficients.
        Delegates to the computeLowes method of the measure.

        :param gauss_coefs: NumPy array containing the (possibly squared) Gauss coeffs for every degree. The last dimension must be the number of Gauss coefficients.
        :type gauss_coefs: np.array (dim: ..., nb_coeffs)
        :param measureName: name of the measure used for the computation
        :type measureName: str
        :param squared: indicates if the supplied gauss_coefs are already squared
        :type squared: bool
        :return: NumPy array containing the degrees, NumPy array containing the Lowes spectra
        :rtype: np.array (dim: lmax), np.array (dim: ..., lmax)
        """
        return self.measures[measureName].computeLowes(gauss_coefs, squared=squared)