Coverage for enpt/options/config.py: 91%

213 statements  

« prev     ^ index     » next       coverage.py v7.4.1, created at 2024-03-07 11:39 +0000

1# -*- coding: utf-8 -*- 

2 

3# EnPT, EnMAP Processing Tool - A Python package for pre-processing of EnMAP Level-1B data 

4# 

5# Copyright (C) 2018-2024 Karl Segl (GFZ Potsdam, segl@gfz-potsdam.de), Daniel Scheffler 

6# (GFZ Potsdam, danschef@gfz-potsdam.de), Niklas Bohn (GFZ Potsdam, nbohn@gfz-potsdam.de), 

7# Stéphane Guillaso (GFZ Potsdam, stephane.guillaso@gfz-potsdam.de) 

8# 

9# This software was developed within the context of the EnMAP project supported 

10# by the DLR Space Administration with funds of the German Federal Ministry of 

11# Economic Affairs and Energy (on the basis of a decision by the German Bundestag: 

12# 50 EE 1529) and contributions from DLR, GFZ and OHB System AG. 

13# 

14# This program is free software: you can redistribute it and/or modify it under 

15# the terms of the GNU General Public License as published by the Free Software 

16# Foundation, either version 3 of the License, or (at your option) any later 

17# version. Please note the following exception: `EnPT` depends on tqdm, which 

18# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files 

19# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore". 

20# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE. 

21# 

22# This program is distributed in the hope that it will be useful, but WITHOUT 

23# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 

24# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 

25# details. 

26# 

27# You should have received a copy of the GNU Lesser General Public License along 

28# with this program. If not, see <https://www.gnu.org/licenses/>. 

29 

30"""EnPT configuration module. 

31 

32Provides the configuration that is later passed to individual submodules. 

33""" 

34 

35import os 

36import json 

37from json import JSONDecodeError 

38import datetime 

39import pkgutil 

40import warnings 

41from pprint import pformat 

42 

43from jsmin import jsmin 

44from cerberus import Validator 

45from collections import OrderedDict 

46from collections.abc import Mapping 

47import numpy as np 

48from multiprocessing import cpu_count 

49 

50from .options_schema import \ 

51 enpt_schema_input, \ 

52 enpt_schema_config_output, \ 

53 parameter_mapping, \ 

54 get_param_from_json_config 

55from ..version import \ 

56 __version__, \ 

57 __versionalias__ 

58 

59__author__ = 'Daniel Scheffler' 

60 

61 

# Root directory of the installed enpt package, resolved via the import system.
# NOTE(review): pkgutil.get_loader() is deprecated since Python 3.12 — consider
# importlib.util.find_spec() in the future; confirm minimum supported Python first.
path_enptlib = os.path.dirname(pkgutil.get_loader("enpt").path)
# Path of the shipped default options file used as the base configuration.
path_options_default = os.path.join(path_enptlib, 'options', 'options_default.json')

try:
    # from acwater.acwater import polymer_ac_enmap
    # Parent directory of the installed 'polymer' package (needed by the water AC).
    path_polymer = os.path.abspath(os.path.join(os.path.dirname(pkgutil.get_loader("polymer").path), os.pardir))
except AttributeError:
    # pkgutil.get_loader() returns None if 'polymer' is not installed,
    # so accessing '.path' raises AttributeError -> fall back to an empty path.
    path_polymer = ''

70 

# Test configuration for the water/combined atmospheric correction workflow
# (Arcachon test dataset, rows 700-730).
config_for_testing_water = {
    'path_l1b_enmap_image': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data', 'EnMAP_Level_1B',
                     # Arcachon subset; alternatives:
                     # 'ENMAP01-____L1B-DT000400126_20170218T110115Z_002_V000204_20200206T182719Z__rows800-899.zip'
                     # 'ENMAP01-____L1B-DT000400126_20170218T110115Z_002_V000204_20200206T182719Z.zip'  # full tile 2
                     'ENMAP01-____L1B-DT000400126_20170218T110115Z_002_V000204_20200206T182719Z__rows700-730.zip'
                     )),
    # optionally also 'path_l1b_enmap_image_gapfill' (rows700-730 subset) for gap-filling tests
    'path_dem': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data',
                     'ENMAP01-____L1B-DT000400126_20170218T110115Z_002_V000204_20200206T182719Z__tile2'
                     '__DEM_ASTER.bsq')),
    'log_level': 'DEBUG',
    'output_dir': os.path.join(path_enptlib, '..', 'tests', 'data', 'test_outputs'),
    'disable_progress_bars': False,
    'is_dummy_dataformat': False,
    'auto_download_ecmwf': True,
    'average_elevation': 0,
    'deadpix_P_algorithm': 'spectral',
    'deadpix_P_interp_spatial': 'linear',
    'deadpix_P_interp_spectral': 'linear',
    'enable_keystone_correction': False,
    'enable_vnir_swir_coreg': False,
    'n_lines_to_append': None,
    'ortho_resampAlg': 'bilinear',
    'run_deadpix_P': True,
    'run_smile_P': False,
    'scale_factor_boa_ref': 10000,
    'scale_factor_toa_ref': 10000,
    'enable_ac': True,
    'mode_ac': 'combined',
    'polymer_additional_results': True,
    'polymer_root': path_polymer,
    'threads': -1,
    'blocksize': 100,
    'vswir_overlap_algorithm': 'swir_only',
    'CPUs': 16,
}

114 

115 

# Test configuration using the small dummy-format Alpine test datasets
# (atmospheric correction disabled).
config_for_testing = {
    'path_l1b_enmap_image': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data', 'EnMAP_Level_1B', 'AlpineTest1_CWV2_SM0.zip')),
    'path_l1b_enmap_image_gapfill': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data', 'EnMAP_Level_1B', 'AlpineTest2_CWV2_SM0.zip')),
    'path_dem': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data', 'dem_map_geo.bsq')),
    'log_level': 'DEBUG',
    'output_dir': os.path.join(path_enptlib, '..', 'tests', 'data', 'test_outputs'),
    'n_lines_to_append': 50,
    'disable_progress_bars': True,
    'is_dummy_dataformat': True,
    'enable_ac': False,
    'ortho_resampAlg': 'bilinear',
    'CPUs': 16,
}

132 

133 

# Test configuration for the DLR data format with land atmospheric correction
# and absolute co-registration enabled (Arcachon test dataset, rows 700-799).
config_for_testing_dlr = {
    'path_l1b_enmap_image': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data', 'EnMAP_Level_1B',
                     # Arcachon; alternative test datasets:
                     # 'ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000101_20190426T143700Z__rows0-99.zip'  # Alps
                     # 'ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000101_20190426T143700Z.zip'  # Alps full
                     # '...Z_002_V000204_20200206T182719Z__rows700-730.zip'  # Arcachon 1000x30
                     # '...Z_002_V000204_20200206T182719Z.zip'  # Arcachon full tile 2
                     # 'ENMAP01-____L1B-DT000400126_20170218T110119Z_003_V000204_20200508T124425Z.zip'  # tile 3, 05/2020
                     # 'L1B_Arcachon_3__enmap.org.zip'  # tile 3 (full), from enmap.org
                     'ENMAP01-____L1B-DT000400126_20170218T110115Z_002_V000204_20200206T182719Z__rows700-799.zip'
                     )),
    # optionally also 'path_l1b_enmap_image_gapfill' (e.g., Alps rows100-199 or Arcachon rows800-899)
    'path_dem': os.path.abspath(
        os.path.join(path_enptlib, '..', 'tests', 'data',
                     # Arcachon tile 2 ASTER DEM (02/2020); alternatives:
                     # 'DLR_L2A_DEM_UTM32.bsq'  # Alps
                     # '...Z_003_V000204_20200508T124425Z__tile3__DEM_ASTER.bsq'  # tile 3 ASTER DEM (05/2020)
                     # '15_DEM_UTM__with_prj.tif'
                     'ENMAP01-____L1B-DT000400126_20170218T110115Z_002_V000204_20200206T182719Z__tile2__DEM_ASTER.bsq'
                     )),
    'log_level': 'DEBUG',
    'output_dir': os.path.join(path_enptlib, '..', 'tests', 'data', 'test_outputs'),
    'n_lines_to_append': 50,
    'disable_progress_bars': False,
    'is_dummy_dataformat': False,
    # optionally also: 'output_format' ('ENVI'), 'output_interleave' ('band'),
    # 'target_projection_type' ('Geographic'), 'target_epsg' (32632),
    # 'target_coord_grid' ([-1.37950, -1.37923, 44.60710, 44.60737])
    'enable_absolute_coreg': True,
    'path_reference_image': os.path.join(path_enptlib, '..', 'tests', 'data', 'T30TXQ_20170218T110111_B05__sub.tif'),
    'enable_ac': True,
    'mode_ac': 'land',
    'CPUs': 32,
    'ortho_resampAlg': 'bilinear',
    'vswir_overlap_algorithm': 'swir_only',
}

196 

197 

# Standard EnMAP target coordinate grid in UTM: anchored at 0 with 30 m spacing
# in both the x and y direction.
enmap_coordinate_grid_utm = {'x': np.array([0, 30]),
                             'y': np.array([0, 30])}
# Pixel resolution in x/y, derived as the point-to-point span of the grid (30 m each).
enmap_xres = np.ptp(enmap_coordinate_grid_utm['x'])
enmap_yres = np.ptp(enmap_coordinate_grid_utm['y'])

201 

202 

class EnPTConfig(object):
    """Central EnPT job configuration, fused from defaults, an optional JSON config and keyword overrides."""

    def __init__(self, json_config='', **user_opts):
        """Create a job configuration.

        :arg json_config:
            path to JSON file containing configuration parameters or a string in JSON format

        :key CPUs:
            number of CPU cores to be used for processing (default: "None" -> use all available)

        :key path_l1b_enmap_image:
            input path of the EnMAP L1B image to be processed
            (zip-archive or root directory; must be given if not contained in --json-config.)

        :key path_l1b_enmap_image_gapfill:
            input path of an adjacent EnMAP L1B image to be used for gap-filling (zip-archive or root directory)

        :key path_dem:
            input path of digital elevation model in map or sensor geometry; GDAL compatible file format (must cover
            the EnMAP L1B data completely if given in map geometry or must have the same pixel dimensions like the
            EnMAP L1B data if given in sensor geometry)

        :key average_elevation:
            average elevation in meters above sea level; may be provided if no DEM is available; ignored if DEM is given

        :key output_dir:
            output directory where processed data and log files are saved

        :key output_format:
            file format of all raster output files ('GTiff': GeoTIFF, 'ENVI': ENVI BSQ; default: 'ENVI')

        :key output_interleave:
            raster data interleaving type (default: 'pixel')

            - 'band': band-sequential (BSQ),
            - 'line': data interleaved-by-line (BIL; only usable for ENVI output format),
            - 'pixel' data interleaved-by-pixel (BIP)

        :key output_nodata_value:
            output no-data/background value (should be within the integer 16-bit range, default: -32768)

        :key working_dir:
            directory to be used for temporary files

        :key n_lines_to_append:
            number of lines to be added to the main image [if None, use the whole gap image].
            Requires 'path_l1b_enmap_image_gapfill' to be set.

        :key drop_bad_bands:
            if set to True (default), the water absorption bands between 1358 and 1453 nm as well
            as between 1814 and 1961 nm are excluded from processing and will not be contained in the L2A product

        :key disable_progress_bars:
            whether to disable all progress bars during processing

        :key path_earthSunDist:
            input path of the earth sun distance model

        :key path_solar_irr:
            input path of the solar irradiance model

        :key scale_factor_toa_ref:
            scale factor to be applied to TOA reflectance result

        :key enable_keystone_correction:
            Enable keystone correction

        :key enable_vnir_swir_coreg:
            Enable VNIR/SWIR co-registration

        :key enable_absolute_coreg:
            Enable the co-registration of the EnMAP image to the reference image given with 'path_reference_image'

        :key path_reference_image:
            Reference image for co-registration.

        :key polymer_root:
            Polymer root directory (that contains the subdirectory for ancillary data).

        :key enable_ac:
            Enable atmospheric correction using SICOR algorithm (default: True).
            If False, the L2A output contains top-of-atmosphere reflectance.

        :key mode_ac:
            3 modes to determine which atmospheric correction is applied at which surfaces (default: land):

            - 'land': SICOR (developed for land surfaces) is applied to land AND water surfaces
            - 'water': POLYMER (developed for water surfaces) is applied to water only
              (land surfaces are not included in the L2A product)
            - 'combined': SICOR is applied to land and POLYMER is applied to water surfaces;
              NOTE that this may result in edge effects, e.g., at coastlines

        :key polymer_additional_results:
            Enable the generation of additional results when running ACwater/POLYMER (default: True)

        :key auto_download_ecmwf:
            Automatically download ECMWF AUX data when running Polymer atmospheric correction for water surfaces

        :key scale_factor_boa_ref:
            Scale factor to be applied to BOA reflectance result

        :key threads:
            number of threads for multiprocessing of blocks (see below):

            - 'threads = 0': for single thread
            - 'threads < 0': for as many threads as there are CPUs
            - 'threads > 0': gives the number of threads

        :key blocksize:
            block size for multiprocessing

        :key run_smile_P:
            Enable extra smile detection and correction (provider smile coefficients are ignored)

        :key run_deadpix_P:
            Enable dead pixel correction

        :key deadpix_P_algorithm:
            Algorithm for dead pixel correction ('spectral' or 'spatial')

        :key deadpix_P_interp_spectral:
            Spectral interpolation algorithm to be used during dead pixel correction
            ('linear', 'quadratic', 'cubic')

        :key deadpix_P_interp_spatial:
            Spatial interpolation algorithm to be used during dead pixel correction
            ('linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic')

        :key ortho_resampAlg:
            Ortho-rectification resampling algorithm ('nearest', 'bilinear', 'gauss', 'cubic', 'cubic_spline',
            'lanczos', 'average', 'mode', 'max', 'min', 'med', 'q1', 'q3')

        :key target_projection_type:
            Projection type of the raster output files ('UTM', 'Geographic') (default: 'UTM')

        :key target_epsg:
            Custom EPSG code of the target projection (overrides target_projection_type)

        :key target_coord_grid:
            Custom target coordinate grid where the output is resampled to ([x0, x1, y0, y1], e.g., [0, 30, 0, 30])
        """
        # fixed attributes
        self.version = __version__
        self.versionalias = __versionalias__

        #######################
        # POPULATE PARAMETERS #
        #######################

        # args
        self.json_config = json_config
        self.kwargs = user_opts

        # get validated options dict from JSON-options
        self.json_opts_fused_valid = self.get_json_opts(validate=True)

        gp = self.get_parameter  # shortcut for the parameter lookup below

        ###################
        # general options #
        ###################

        self.is_dummy_dataformat = gp('is_dummy_dataformat')
        if 'is_dlr_dataformat' in user_opts:
            # backwards compatibility: 'is_dlr_dataformat' was replaced by 'is_dummy_dataformat'
            # (inverted meaning, hence the 'is False' below)
            warnings.warn("The 'is_dlr_dataformat' flag is deprecated and will not exist in future. "
                          "Please set 'is_dummy_dataformat' to False instead.", DeprecationWarning)
            self.is_dummy_dataformat = user_opts['is_dlr_dataformat'] is False

        self.CPUs = gp('CPUs', fallback=cpu_count())
        self.log_level = gp('log_level')
        self.create_logfile = gp('create_logfile')
        self.path_l1b_enmap_image = self.absPath(gp('path_l1b_enmap_image'))
        self.path_l1b_enmap_image_gapfill = self.absPath(gp('path_l1b_enmap_image_gapfill'))
        self.path_dem = self.absPath(gp('path_dem'))
        self.average_elevation = gp('average_elevation')
        self.path_l1b_snr_model = self.absPath(gp('path_l1b_snr_model'))
        self.working_dir = self.absPath(gp('working_dir')) or None
        self.n_lines_to_append = gp('n_lines_to_append')
        self.drop_bad_bands = gp('drop_bad_bands')
        self.disable_progress_bars = gp('disable_progress_bars')

        ##################
        # output options #
        ##################

        self.output_dir = self.absPath(gp('output_dir', fallback=os.path.abspath(os.path.curdir)))
        self.output_format = gp('output_format')
        self.output_interleave = gp('output_interleave')
        self.output_nodata_value = gp('output_nodata_value')

        ###########################
        # processor configuration #
        ###########################

        # toa_ref
        self.path_earthSunDist = self.absPath(gp('path_earthSunDist'))
        self.path_solar_irr = self.absPath(gp('path_solar_irr'))
        self.scale_factor_toa_ref = gp('scale_factor_toa_ref')

        # geometry
        self.enable_keystone_correction = gp('enable_keystone_correction')
        self.enable_vnir_swir_coreg = gp('enable_vnir_swir_coreg')
        self.enable_absolute_coreg = gp('enable_absolute_coreg')
        self.path_reference_image = gp('path_reference_image')

        # atmospheric_correction
        self.polymer_root = gp('polymer_root')
        self.enable_ac = gp('enable_ac')
        self.mode_ac = gp('mode_ac')
        self.polymer_additional_results = gp('polymer_additional_results')
        self.auto_download_ecmwf = gp('auto_download_ecmwf')
        self.scale_factor_boa_ref = gp('scale_factor_boa_ref')
        self.threads = gp('threads')
        self.blocksize = gp('blocksize')

        # smile
        self.run_smile_P = gp('run_smile_P')

        # dead_pixel
        self.run_deadpix_P = gp('run_deadpix_P')
        self.deadpix_P_algorithm = gp('deadpix_P_algorithm')
        self.deadpix_P_interp_spectral = gp('deadpix_P_interp_spectral')
        self.deadpix_P_interp_spatial = gp('deadpix_P_interp_spatial')

        # orthorectification / VSWIR fusion
        self.ortho_resampAlg = gp('ortho_resampAlg')
        self.vswir_overlap_algorithm = gp('vswir_overlap_algorithm')
        self.target_projection_type = gp('target_projection_type')
        self.target_epsg = gp('target_epsg')
        grid = gp('target_coord_grid')
        # the grid is given as a flat list [x0, x1, y0, y1] -> split into x/y arrays
        self.target_coord_grid = dict(x=np.array(grid[:2]), y=np.array(grid[2:])) if grid else None

        #########################
        # validate final config #
        #########################

        EnPTValidator(allow_unknown=True, schema=enpt_schema_config_output).validate(self.to_dict())

        # check if given paths point to existing files
        # (skipped in GUI tests, where dummy paths are used)
        if os.getenv('IS_ENPT_GUI_TEST') != "1":
            paths = {k: v for k, v in self.__dict__.items() if k.startswith('path_')}
            for k, fp in paths.items():
                if fp and not os.path.isfile(fp):
                    raise FileNotFoundError("The file path provided at the '%s' parameter does not point "
                                            "to an existing file (%s)." % (k, fp))

        if not self.path_dem:
            warnings.warn('No digital elevation model provided. Note that this may cause uncertainties, e.g., '
                          'in the atmospheric correction and the orthorectification.', RuntimeWarning, stacklevel=2)

        # check invalid interleave
        if self.output_interleave == 'line' and self.output_format == 'GTiff':
            warnings.warn("The interleaving type 'line' is not supported by the GTiff output format. Using 'pixel'.",
                          UserWarning)
            self.output_interleave = 'pixel'

        # override target_projection_type if target_epsg is given
        # (EPSG 326xx/327xx are the UTM north/south zones; anything else maps to 'NA')
        if self.target_epsg:
            self.target_projection_type = \
                'Geographic' if self.target_epsg == 4326 else \
                'UTM' if len(str(self.target_epsg)) == 5 and str(self.target_epsg)[:3] in ['326', '327'] else \
                'NA'
        if self.target_projection_type == 'Geographic':
            self.target_epsg = 4326

        # set target coordinate grid to the UTM EnMAP grid if no other grid is provided and target projection is UTM
        self.target_coord_grid = \
            self.target_coord_grid if self.target_coord_grid else \
            enmap_coordinate_grid_utm if self.target_projection_type == 'UTM' else None

    @staticmethod
    def absPath(path):
        """Return *path* as absolute path (relative paths are interpreted relative to the enpt package root)."""
        return path if not path or os.path.isabs(path) else os.path.abspath(os.path.join(path_enptlib, path))

    def get_parameter(self, key_user_opts, fallback=None):
        """Return the value of the given parameter, preferring user keyword args over the fused JSON options.

        :param key_user_opts: name of the parameter to look up
        :param fallback:      value to return if the parameter resolves to a falsy value
        """
        # 1. priority: parameters that have directly passed to EnPTConfig within user_opts
        if key_user_opts in self.kwargs:
            return self.kwargs[key_user_opts]

        # 2. priority: default options, overridden by eventually provided json_config
        else:
            param = get_param_from_json_config(key_user_opts, self.json_opts_fused_valid)
            if not param:
                # NOTE(review): falsy-but-valid JSON values (0, False, '') are also replaced
                # by the fallback here, not only None — confirm this is intended
                if fallback:
                    return fallback
            return param

    def get_json_opts(self, validate=True):
        """Get a dictionary of EnPT config parameters.

        NOTE: Reads the default options from options_default.json and updates the values with those from database.
        """
        def update_dict(d, u):
            # recursively update the nested dict d with the values of u (in place)
            for k, v in u.items():
                if isinstance(v, Mapping):
                    d[k] = update_dict(d.get(k, {}), v)
                else:
                    d[k] = v
            return d

        # read options_default.json
        default_options = get_options(path_options_default, validation=validate)

        ###############################################################################################################
        # if json config is provided (via python bindings or CLI parser -> override all options with that json config #
        ###############################################################################################################

        if self.json_config:
            if self.json_config.startswith("{"):
                # json_config is a JSON-formatted string
                try:
                    params_dict = json.loads(jsmin(self.json_config))
                except JSONDecodeError:
                    warnings.warn('The given JSON options string could not be decoded. '
                                  'JSON decoder failed with the following error:')
                    raise
            elif os.path.isfile(self.json_config):
                # json_config is a path to a JSON file on disk
                try:
                    with open(self.json_config, 'r') as inF:
                        params_dict = json.loads(jsmin(inF.read()))
                except JSONDecodeError:
                    warnings.warn('The given JSON options file %s could not be decoded. '
                                  'JSON decoder failed with the following error:' % self.json_config)
                    raise

            else:
                raise ValueError("The parameter 'json_config' must be a JSON formatted string or a JSON file on disk.")

            # convert values to useful data types and update the default values
            params_dict = json_to_python(params_dict)
            update_dict(default_options, params_dict)

        if validate:
            EnPTValidator(allow_unknown=True, schema=enpt_schema_input).validate(default_options)

        json_options = default_options
        return json_options

    def to_dict(self):
        """Generate a dictionary in the same structure like the one in options_default.json from the current config."""

        def nested_set(dic, keys, value):
            # set dic[k0][k1]...[kn] = value, creating intermediate dicts as needed
            for k in keys[:-1]:
                dic = dic.setdefault(k, {})
            dic[keys[-1]] = value

        outdict = dict()
        for key_user_opts, subkeys in parameter_mapping.items():
            nested_set(outdict, subkeys, getattr(self, key_user_opts))

        return outdict

    def to_jsonable_dict(self):
        """Return the config as a dict in the options_default.json structure with JSON-serializable values."""
        return python_to_json(self.to_dict())

    def save(self, path_outfile):
        """Save the JobConfig instance to a JSON file in the same structure like the one in options_default.json.

        :param path_outfile: path of the output JSON file
        """
        with open(path_outfile, 'w') as outF:
            json.dump(self.to_jsonable_dict(), outF, skipkeys=False, indent=4)

    def __repr__(self):
        """Return a pretty-printed representation of the current configuration."""
        return pformat(self.to_dict())

567 

568 

def json_to_python(value):
    """Recursively convert a JSON-parsed value into its Python-side equivalent.

    Converts the string placeholders "None", "slice(None, None, None)", "true" and
    "false" into the corresponding Python objects and numeric strings into int/float
    (int is preferred where the string has no fractional part). Dicts and lists are
    converted recursively (dict keys included); anything else is returned unchanged.

    :param value: any value as returned by json.loads()
    :return: the converted Python value
    """
    def is_number(s):
        # True if s can be interpreted as a number
        # (TypeError covers non-numeric, non-string inputs such as bytes)
        try:
            float(s)
            return True
        except (ValueError, TypeError):
            return False

    if type(value) is dict:
        return {json_to_python(k): json_to_python(v) for k, v in value.items()}
    elif type(value) is list:
        return [json_to_python(v) for v in value]
    else:
        if value == "None":
            return None
        if value == "slice(None, None, None)":
            return slice(None)
        if value is True or value == "true":
            return True
        if value is False or value == "false":
            return False
        if isinstance(value, (int, float)):
            # already numeric -> return unchanged
            # (FIX: the previous str(int(...)) != str(float(...)) comparison was true for
            # every value where int() succeeds, silently truncating floats like 5.5 to 5)
            return value
        if is_number(value):
            # numeric string: prefer int, fall back to float for fractional values
            try:
                return int(value)
            except ValueError:
                return float(value)
        return value

600 

601 

def python_to_json(value):
    """Recursively convert a Python value into a JSON-serializable equivalent.

    Inverse of json_to_python(): None, slice(None), True and False become the string
    placeholders "None", "slice(None, None, None)", "true" and "false"; numpy arrays
    become lists; datetimes become formatted strings. Dicts/OrderedDicts and lists are
    converted recursively (dict keys included); anything else is returned unchanged.

    :param value: any Python value
    :return: the JSON-serializable equivalent
    """
    if type(value) in [dict, OrderedDict]:
        return {python_to_json(k): python_to_json(v) for k, v in value.items()}
    elif type(value) is list:
        return [python_to_json(v) for v in value]
    elif type(value) is np.ndarray:
        return [python_to_json(v) for v in value.tolist()]
    else:
        if value is None:
            return "None"
        if isinstance(value, slice) and value == slice(None):
            # FIX: the previous identity check 'value is slice(None)' could never be True
            # because slice(None) creates a new object on each call -> compare by equality
            return "slice(None, None, None)"
        if value is True:
            return "true"
        if value is False:
            return "false"
        if type(value) is datetime.datetime:
            return datetime.datetime.strftime(value, '%Y-%m-%d %H:%M:%S.%f%z')
        return value

622 

623 

class EnPTValidator(Validator):
    """Cerberus validator for EnPT options that raises instead of returning False."""

    def __init__(self, *args, **kwargs):
        """Get an instance of EnPTValidator.

        :param args: Arguments to be passed to cerberus.Validator
        :param kwargs: Keyword arguments to be passed to cerberus.Validator
        """
        super().__init__(*args, **kwargs)

    def validate(self, document2validate, **kwargs):
        """Validate the given document against the schema, raising ValueError on failure."""
        is_valid = super().validate(document=document2validate, **kwargs)
        if is_valid is False:
            raise ValueError("Options is malformed: %s" % str(self.errors))

636 

637 

def get_options(target: str, validation: bool = True):
    """Return dictionary with all options.

    :param target: if path to file, then json is used to load, otherwise the default template is used
    :param validation: True / False, whether to validate options read from files or not
    :return: dictionary with options
    """
    if not os.path.isfile(target):
        raise FileNotFoundError("Options file not found at file path %s." % target)

    # strip JS-style comments, parse JSON and convert placeholder strings to Python types
    with open(target, "r") as fl:
        opts = json_to_python(json.loads(jsmin(fl.read())))

    if validation is True:
        EnPTValidator(allow_unknown=True, schema=enpt_schema_input).validate(opts)

    return opts