in the middle of fixing custom field import

log (new file, 126 lines)

@@ -0,0 +1,126 @@
attempt 1 to compute w_c, this time using Rule(targets={'z_targets': 1, 't': 1, 'time_window': 1, 't_num': 1, 'dt': 1, 'w_c': 1, 'w0': 1, 'w': 1, 'w_power_fact': 1, 'l': 1}, func=<function build_sim_grid_0 at 0x7f9e92cb1280>, args=['length', 'z_num', 'wavelength', 'interpolation_degree', 'time_window', 't_num'])
attempt 1 to compute time_window, this time using Rule(targets={'z_targets': 1, 't': 1, 'time_window': 1, 't_num': 1, 'dt': 1, 'w_c': 1, 'w0': 1, 'w': 1, 'w_power_fact': 1, 'l': 1}, func=<function build_sim_grid_0 at 0x7f9e92cb1280>, args=['length', 'z_num', 'wavelength', 'interpolation_degree', 'time_window', 't_num'])
error using build_sim_grid_0 : cyclic dependency detected : 'time_window' seems to depend on itself, please provide a value for at least one variable in {'w_c', 'time_window'}
attempt 2 to compute time_window, this time using Rule(targets={'z_targets': 1, 't': 1, 'time_window': 1, 't_num': 1, 'dt': 1, 'w_c': 1, 'w0': 1, 'w': 1, 'w_power_fact': 1, 'l': 1}, func=<function build_sim_grid_0 at 0x7f9e92cb11f0>, args=['length', 'z_num', 'wavelength', 'interpolation_degree', 'time_window', 'dt'])
error using build_sim_grid_0 : cyclic dependency detected : 'time_window' seems to depend on itself, please provide a value for at least one variable in {'w_c', 'time_window'}
attempt 3 to compute time_window, this time using Rule(targets={'z_targets': 1, 't': 1, 'time_window': 1, 't_num': 1, 'dt': 1, 'w_c': 1, 'w0': 1, 'w': 1, 'w_power_fact': 1, 'l': 1}, func=<function build_sim_grid_0 at 0x7f9e92cb1310>, args=['length', 'z_num', 'wavelength', 'interpolation_degree', 't_num', 'dt'])
computed z_targets=[0. 0.00043307 0.00086614 0.00129921 0.00173228 0.00216535
0.00259843 0.0030315 0.00346457 0.00389764 0.00433071 0.00476378
0.00519685 0.00562992 0.00606299 0.00649606 0.00692913 0.0073622
0.00779528 0.00822835 0.00866142 0.00909449 0.00952756 0.00996063
0.0103937 0.01082677 0.01125984 0.01169291 0.01212598 0.01255906
0.01299213 0.0134252 0.01385827 0.01429134 0.01472441 0.01515748
0.01559055 0.01602362 0.01645669 0.01688976 0.01732283 0.01775591
0.01818898 0.01862205 0.01905512 0.01948819 0.01992126 0.02035433
0.0207874 0.02122047 0.02165354 0.02208661 0.02251969 0.02295276
0.02338583 0.0238189 0.02425197 0.02468504 0.02511811 0.02555118
0.02598425 0.02641732 0.02685039 0.02728346 0.02771654 0.02814961
0.02858268 0.02901575 0.02944882 0.02988189 0.03031496 0.03074803
0.0311811 0.03161417 0.03204724 0.03248031 0.03291339 0.03334646
0.03377953 0.0342126 0.03464567 0.03507874 0.03551181 0.03594488
0.03637795 0.03681102 0.03724409 0.03767717 0.03811024 0.03854331
0.03897638 0.03940945 0.03984252 0.04027559 0.04070866 0.04114173
0.0415748 0.04200787 0.04244094 0.04287402 0.04330709 0.04374016
0.04417323 0.0446063 0.04503937 0.04547244 0.04590551 0.04633858
0.04677165 0.04720472 0.0476378 0.04807087 0.04850394 0.04893701
0.04937008 0.04980315 0.05023622 0.05066929 0.05110236 0.05153543
0.0519685 0.05240157 0.05283465 0.05326772 0.05370079 0.05413386
0.05456693 0.055 ] using build_sim_grid_0 from evaluator
computed t=[-8.1915e-12 -8.1905e-12 -8.1895e-12 ... 8.1895e-12 8.1905e-12
8.1915e-12] using build_sim_grid_0 from evaluator
computed time_window=1.6383000000000002e-11 using build_sim_grid_0 from evaluator
computed w_c=[ 0.00000000e+00 3.83495197e+11 7.66990394e+11 ... -1.15048559e+12
-7.66990391e+11 -3.83495194e+11] using build_sim_grid_0 from evaluator
computed w0=1226655097231605.2 using build_sim_grid_0 from evaluator
computed w=[1.22665510e+15 1.22703859e+15 1.22742209e+15 ... 1.22550461e+15
1.22588811e+15 1.22627160e+15] using build_sim_grid_0 from evaluator
computed w_power_fact=[[ 0.00000000e+000 7.35342831e+022 2.94137132e+023 ... 6.61808544e+023
2.94137130e+023 7.35342818e+022]
[ 0.00000000e+000 9.40001479e+033 7.52001183e+034 ... -2.53800397e+035
-7.52001174e+034 -9.40001456e+033]
[ 0.00000000e+000 9.01215131e+044 1.44194421e+046 ... 7.29984248e+046
1.44194419e+046 9.01215101e+044]
...
[ 0.00000000e+000 1.16026959e+088 2.97029015e+090 ... 7.61252860e+091
2.97029005e+090 1.16026951e+088]
[ 0.00000000e+000 4.94397571e+098 2.53131557e+101 ... -9.73122716e+102
-2.53131547e+101 -4.94397535e+098]
[ 0.00000000e+000 1.89599094e+109 1.94149472e+112 ... 1.11956366e+114
1.94149464e+112 1.89599078e+109]] using build_sim_grid_0 from evaluator
computed l=[1.53560000e-06 1.53512007e-06 1.53464044e-06 ... 1.53704160e-06
1.53656077e-06 1.53608023e-06] using build_sim_grid_0 from evaluator
computed w_c=[ 0.00000000e+00 3.83495197e+11 7.66990394e+11 ... -1.15048559e+12
-7.66990391e+11 -3.83495194e+11] using build_sim_grid_0 from evaluator
attempt 1 to compute spec_0, this time using Rule(targets={'spec_0': 3}, func=<function load_previous_spectrum at 0x7f9e8fca7ca0>, args=['prev_data_dir'])
error using load_previous_spectrum : no rule for prev_data_dir
attempt 2 to compute spec_0, this time using Rule(targets={'spec_0': 1}, func=<function fft at 0x7f9e8dd27040>, args=['field_0'])
attempt 1 to compute field_0, this time using Rule(targets={'field_0': 1}, func=<function ifft at 0x7f9e8dd27160>, args=['spec_0'])
error using ifft : cyclic dependency detected : 'spec_0' seems to depend on itself, please provide a value for at least one variable in {'field_0', 'spec_0', 'prev_data_dir'}
attempt 2 to compute field_0, this time using Rule(targets={'field_0': 1}, func=<function add_shot_noise at 0x7f9e92c9e280>, args=['pre_field_0', 'quantum_noise', 'w_c', 'w0', 'time_window', 'dt'])
attempt 1 to compute pre_field_0, this time using Rule(targets={'pre_field_0': 2, 'peak_power': 1, 'energy': 1, 'width': 1}, func=<function load_and_adjust_field_file at 0x7f9e92c9baf0>, args=['field_file', 't', 'peak_power', 'energy', 'intensity_noise', 'noise_correlation'])
attempt 1 to compute peak_power, this time using Rule(targets={'pre_field_0': 2, 'peak_power': 1, 'energy': 1, 'width': 1}, func=<function load_and_adjust_field_file at 0x7f9e92c9baf0>, args=['field_file', 't', 'peak_power', 'energy', 'intensity_noise', 'noise_correlation'])
error using load_and_adjust_field_file : cyclic dependency detected : 'peak_power' seems to depend on itself, please provide a value for at least one variable in {'peak_power', 'field_0', 'spec_0', 'prev_data_dir', 'pre_field_0'}
attempt 2 to compute peak_power, this time using Rule(targets={'peak_power': 1}, func=<function E0_to_P0 at 0x7f9e92c9bca0>, args=['energy', 't0', 'shape'])
attempt 1 to compute energy, this time using Rule(targets={'pre_field_0': 2, 'peak_power': 1, 'energy': 1, 'width': 1}, func=<function load_and_adjust_field_file at 0x7f9e92c9baf0>, args=['field_file', 't', 'peak_power', 'energy', 'intensity_noise', 'noise_correlation'])
error using load_and_adjust_field_file : cyclic dependency detected : 'peak_power' seems to depend on itself, please provide a value for at least one variable in {'peak_power', 'field_0', 'energy', 'spec_0', 'prev_data_dir', 'pre_field_0'}
attempt 2 to compute energy, this time using Rule(targets={'width': 1, 'peak_power': 1, 'energy': 1}, func=<function measure_custom_field at 0x7f9e92c9ec10>, args=['field_file', 't'])
computed width=8.827707794861245e-14 using measure_custom_field from scgenerator.physics.pulse
computed peak_power=0.0008031855671188997 using measure_custom_field from scgenerator.physics.pulse
computed energy=8.994671799886145e-17 using measure_custom_field from scgenerator.physics.pulse
attempt 1 to compute t0, this time using Rule(targets={'t0': 1}, func=<function width_to_t0 at 0x7f9e92c9b670>, args=['width', 'shape'])
computed t0=7.497564676748131e-14 using width_to_t0 from scgenerator.physics.pulse
computed peak_power=0.0009572054484404637 using E0_to_P0 from scgenerator.physics.pulse
computed pre_field_0=[0.+0.j 0.+0.j 0.+0.j ... 0.+0.j 0.+0.j 0.+0.j] using load_and_adjust_field_file from scgenerator.physics.pulse
computed field_0=[ 0.00160864+0.02127688j -0.01957086-0.0069138j -0.01820735+0.02002421j
... 0.00622707-0.01068023j 0.01055843-0.00319426j
0.00117607-0.03033724j] using add_shot_noise from scgenerator.physics.pulse
computed spec_0=[ 1.50681561-3.69877757j -5.66353753-2.05828961j -0.54462979-1.50501029j
... -1.45621111-3.03434314j 2.6362116 -3.91420005j
-2.2820021 -3.24150382j] using fft from numpy.fft
attempt 1 to compute beta2_coefficients, this time using Rule(targets={'beta2_coefficients': 1}, func=<function dispersion_coefficients at 0x7f9e9158f3a0>, args=['wl_for_disp', 'beta2_arr', 'w0', 'interpolation_range', 'interpolation_degree'])
attempt 1 to compute wl_for_disp, this time using Rule(targets={'wl_for_disp': 2, 'beta2_arr': 2, 'interpolation_range': 2}, func=<function load_custom_dispersion at 0x7f9e9158f0d0>, args=['dispersion_file'])
computed wl_for_disp=[7.0000e-07 7.0125e-07 7.0250e-07 ... 2.3960e-06 2.3970e-06 2.3980e-06] using load_custom_dispersion from scgenerator.physics.fiber
computed beta2_arr=[ 3.37108270e-26 3.37294702e-26 3.37477437e-26 ... -2.08838856e-25
-2.09213746e-25 -2.09589149e-25] using load_custom_dispersion from scgenerator.physics.fiber
computed interpolation_range=(7e-07, 2.398e-06) using load_custom_dispersion from scgenerator.physics.fiber
interpolating dispersion between 700.0nm and 2398.0nm
computed beta2_coefficients=[-2.11874551e-026 1.48178177e-040 -4.15401033e-055 2.03223853e-069
-1.65547343e-083 1.26886165e-097 -6.93559198e-112 2.31284341e-126
-3.54046694e-141] using dispersion_coefficients from scgenerator.physics.fiber
attempt 1 to compute gamma_arr, this time using Rule(targets={'gamma_arr': 1}, func=<function gamma_parameter at 0x7f9e9158caf0>, args=['n2', 'w0', 'A_eff_arr'])
attempt 1 to compute A_eff_arr, this time using Rule(targets={'A_eff_arr': 1}, func=<function A_eff_from_V at 0x7f9e9158c790>, args=['core_radius', 'V_eff_arr'])
error using A_eff_from_V : no rule for core_radius
attempt 2 to compute A_eff_arr, this time using Rule(targets={'A_eff_arr': 1}, func=<function load_custom_A_eff at 0x7f9e9158f040>, args=['A_eff_file', 'l'])
error using load_custom_A_eff : no rule for A_eff_file
attempt 3 to compute A_eff_arr, this time using Rule(targets={'A_eff_arr': -1}, func=<function constant_A_eff_arr at 0x7f9e9158cb80>, args=['l', 'A_eff'])
attempt 1 to compute A_eff, this time using Rule(targets={'A_eff': 1}, func=<function A_eff_from_V at 0x7f9e9158c790>, args=['core_radius', 'V_eff'])
error using A_eff_from_V : cyclic dependency detected : 'core_radius' seems to depend on itself, please provide a value for at least one variable in {'core_radius', 'A_eff', 'gamma_arr', 'prev_data_dir', 'A_eff_arr', 'A_eff_file'}
attempt 2 to compute A_eff, this time using Rule(targets={'A_eff': 1}, func=<function A_eff_from_diam at 0x7f9e9158cca0>, args=['effective_mode_diameter'])
computed A_eff=8.01184666481737e-11 using A_eff_from_diam from scgenerator.physics.fiber
computed A_eff_arr=[8.01184666e-11 8.01184666e-11 8.01184666e-11 ... 8.01184666e-11
8.01184666e-11 8.01184666e-11] using constant_A_eff_arr from scgenerator.physics.fiber
computed gamma_arr=[0.00112355 0.00112355 0.00112355 ... 0.00112355 0.00112355 0.00112355] using gamma_parameter from scgenerator.physics.fiber
attempt 1 to compute hr_w, this time using Rule(targets={'hr_w': 1}, func=<function delayed_raman_w at 0x7f9e9158f5e0>, args=['t', 'dt', 'raman_type'])
computed hr_w=[1.00168656+0.j 1.00197747-0.00248147j 1.00283647-0.00508742j ...
1.00422327+0.00793569j 1.00283647+0.00508742j 1.00197747+0.00248147j] using delayed_raman_w from scgenerator.physics.fiber
attempt 1 to compute adapt_step_size, this time using Rule(targets={'adapt_step_size': 1}, func=<function <lambda> at 0x7f9e92cb13a0>, args=['step_size'])
computed adapt_step_size=True using <lambda> from scgenerator.utils.parameter
attempt 1 to compute dynamic_dispersion, this time using Rule(targets={'dynamic_dispersion': 1}, func=<function <lambda> at 0x7f9e92cb1430>, args=['pressure'])
computed dynamic_dispersion=False using <lambda> from scgenerator.utils.parameter
Traceback (most recent call last):
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/play.py", line 20, in <module>
    main()
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/play.py", line 10, in main
    pa = Parameters.load(
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/src/scgenerator/utils/parameter.py", line 507, in load
    return cls(**utils.load_toml(path))
  File "<string>", line 87, in __init__
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/src/scgenerator/utils/parameter.py", line 503, in __post_init__
    setattr(self, k, v)
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/src/scgenerator/utils/parameter.py", line 245, in __set__
    self.validator(self.name, value)
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/src/scgenerator/utils/parameter.py", line 41, in _type_checker_wrapped
    validator(name, n)
  File "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/Module/src/scgenerator/utils/parameter.py", line 105, in int_pair
    raise ValueError(f"{name!r} must be a list or a tuple of 2 int")
ValueError: 'fit_parameters' must be a list or a tuple of 2 int
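
Note on the traceback above: the run fails in the parameter validation layer, not in the evaluator. parameter.py wires each Parameters field to a descriptor whose __set__ calls a validator, and 'fit_parameters' is validated by int_pair, which rejects the float pair fit_parameters=(0.08, 200e-9) found in the defaults further down. A minimal sketch of that mechanism, with the shapes inferred from the traceback (the actual scgenerator code may differ):

    from typing import Any, Callable

    def int_pair(name: str, value: Any) -> None:
        # rejects fit_parameters=(0.08, 200e-9): the elements are floats, not ints
        ok = isinstance(value, (list, tuple)) and len(value) == 2
        if not ok or not all(isinstance(n, int) for n in value):
            raise ValueError(f"{name!r} must be a list or a tuple of 2 int")

    class Parameter:
        # descriptor: __set__ runs the validator, as at parameter.py line 245
        def __init__(self, validator: Callable[[str, Any], None]):
            self.validator = validator

        def __set_name__(self, owner, name: str):
            self.name = name

        def __set__(self, instance, value):
            self.validator(self.name, value)
            instance.__dict__[self.name] = value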

play.py (10 changes)

@@ -1,5 +1,7 @@
from scgenerator import Parameters
from scgenerator.physics.simulate import RK4IP
import os
import matplotlib.pyplot as plt


def main():
@@ -7,9 +9,13 @@ def main():
    try:
        os.chdir("/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/PCF Simulations")

        pa = Parameters.load("PM1550+PM2000D/PM1550_PM2000D raman_test/initial_config_0.toml")
        pa = Parameters.load(
            "/Users/benoitsierro/Nextcloud/PhD/Supercontinuum/PCF Simulations/PM1550+PM2000D/PM1550_RIN.toml"
        )

        print(pa)
        plt.plot(pa.t, pa.field_0.imag)
        plt.plot(pa.t, pa.field_0.real)
        plt.show()
    finally:
        os.chdir(cwd)

@@ -1,4 +1,4 @@
from . import initialize, io, math, utils
from . import initialize, math, utils
from .initialize import (
    Config,
    ContinuationParamSequence,
@@ -6,11 +6,10 @@ from .initialize import (
    ParamSequence,
    RecoveryParamSequence,
)
from .io import Paths, load_toml
from .math import abs2, argclosest, span
from .physics import fiber, materials, pulse, simulate, units
from .physics.simulate import RK4IP, new_simulation, resume_simulations
from .physics.units import PlotRange
from .plotting import mean_values_plot, plot_spectrogram, propagation_plot, single_position_plot
from .spectra import Pulse, Spectrum
from .utils.parameter import BareConfig, Parameters
from .utils import Paths, load_toml
from .utils.parameter import BareConfig, Parameters, PlotRange

@@ -6,7 +6,7 @@ import re

import numpy as np

from .. import env, io, scripts
from .. import env, utils, scripts
from ..logger import get_logger
from ..physics.fiber import dispersion_coefficients
from ..physics.simulate import SequencialSimulations, resume_simulations, run_simulation_sequence
@@ -150,13 +150,13 @@ def run_sim(args):


def merge(args):
    path_trees = io.build_path_trees(Path(args.path))
    path_trees = utils.build_path_trees(Path(args.path))

    output = env.output_path()
    if output is None:
        output = path_trees[0][-1][0].parent.name + " merged"

    io.merge(output, path_trees)
    utils.merge(output, path_trees)


def prep_ray():

@@ -1,34 +1,6 @@
import matplotlib.pyplot as plt
from pathlib import Path

default_parameters = dict(
    input_transmission=1.0,
    name="no name",
    he_mode=(1, 1),
    fit_parameters=(0.08, 200e-9),
    model="custom",
    length=1,
    n2=2.2e-20,
    capillary_resonance_strengths=[],
    capillary_nested=0,
    gas_name="vacuum",
    plasma_density=0,
    pressure=1e5,
    temperature=300,
    quantum_noise=False,
    intensity_noise=0,
    shape="gaussian",
    frep=40e6,
    behaviors=["spm", "ss"],
    raman_type="agrawal",
    parallel=True,
    repeat=1,
    tolerated_error=1e-11,
    interpolation_degree=8,
    interpolation_range=(200e-9, 3000e-9),
    ideal_gas=False,
    recovery_last_stored=0,
)

default_plotting = dict(
    figsize=(10, 7),

@@ -34,3 +34,11 @@ class DuplicateParameterError(Exception):

class IncompleteDataFolderError(FileNotFoundError):
    pass


class EvaluatorError(Exception):
    pass


class NoDefaultError(Exception):
    pass
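
The log at the top of this commit shows what EvaluatorError is for: the evaluator tries each Rule that can produce a missing target, reports "error using <func> : ..." when one fails, and gives up with "no rule for <target>" or a cyclic-dependency message. A hypothetical sketch of that resolution loop (not the actual scgenerator.utils.evaluator code):

    import inspect
    from typing import Any, Callable

    def compute(target: str, rules: dict[str, list[Callable]],
                values: dict[str, Any], stack: tuple[str, ...] = ()) -> Any:
        if target in values:
            return values[target]
        if target in stack:
            raise EvaluatorError(f"cyclic dependency detected : {target!r} seems to depend on itself")
        last_error = EvaluatorError(f"no rule for {target}")
        for func in rules.get(target, []):
            try:
                # resolve the rule's arguments recursively, then call it
                args = [compute(p, rules, values, stack + (target,))
                        for p in inspect.signature(func).parameters]
                values[target] = func(*args)
                return values[target]
            except EvaluatorError as error:
                last_error = error  # try the next rule, as in the "attempt n" log lines
        raise last_error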

@@ -1,24 +1,23 @@
import os
from collections import defaultdict
from collections.abc import Mapping
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Any, Dict, Iterator, List, Tuple, Union
from collections import defaultdict
from typing import Any, Iterator, Union

import numpy as np

from . import io, utils
from .defaults import default_parameters
from . import utils
from .errors import *
from .logger import get_logger
from .utils import override_config, required_simulations
from .utils.evaluator import Evaluator
from .utils.parameter import (
    BareConfig,
    Parameters,
    hc_model_specific_parameters,
    mandatory_parameters,
    override_config,
    required_simulations,
)
from scgenerator.utils import parameter


@dataclass
@@ -188,12 +187,12 @@ class Config(BareConfig):


class ParamSequence:
    def __init__(self, config_dict: Union[Dict[str, Any], os.PathLike, BareConfig]):
    def __init__(self, config_dict: Union[dict[str, Any], os.PathLike, BareConfig]):
        """creates a param sequence from a base config

        Parameters
        ----------
        config_dict : Union[Dict[str, Any], os.PathLike, BareConfig]
        config_dict : Union[dict[str, Any], os.PathLike, BareConfig]
            can be either a dictionary, a path to a config toml file or a BareConfig obj
        """
        if isinstance(config_dict, Config):
@@ -202,14 +201,14 @@ class ParamSequence:
            self.config = Config.from_bare(config_dict)
        else:
            if not isinstance(config_dict, Mapping):
                config_dict = io.load_toml(config_dict)
                config_dict = utils.load_toml(config_dict)
            self.config = Config(**config_dict)
        self.name = self.config.name
        self.logger = get_logger(__name__)

        self.update_num_sim()

    def __iter__(self) -> Iterator[Tuple[List[Tuple[str, Any]], Parameters]]:
    def __iter__(self) -> Iterator[tuple[list[tuple[str, Any]], Parameters]]:
        """iterates through all possible parameters, yielding a config as well as a flattened
        computed parameters set each time"""
        for variable_list, params in required_simulations(self.config):
@@ -242,17 +241,17 @@ class ContinuationParamSequence(ParamSequence):
        ----------
        prev_sim_dir : PathLike
            path to the folder of the previous simulation containing 'initial_config.toml'
        new_config : Dict[str, Any]
        new_config : dict[str, Any]
            new config
        """
        self.prev_sim_dir = Path(prev_sim_dir)
        self.bare_configs = BareConfig.load_sequence(new_config.previous_config_file)
        self.bare_configs.append(new_config)
        self.bare_configs[0] = Config.from_bare(self.bare_configs[0])
        final_config = utils.final_config_from_sequence(*self.bare_configs)
        final_config = parameter.final_config_from_sequence(*self.bare_configs)
        super().__init__(final_config)

    def __iter__(self) -> Iterator[Tuple[List[Tuple[str, Any]], Parameters]]:
    def __iter__(self) -> Iterator[tuple[list[tuple[str, Any]], Parameters]]:
        """iterates through all possible parameters, yielding a config as well as a flattened
        computed parameters set each time"""
        for variable_list, params in required_simulations(*self.bare_configs):
@@ -260,12 +259,12 @@ class ContinuationParamSequence(ParamSequence):
            params.prev_data_dir = str(prev_data_dir.resolve())
            yield variable_list, params

    def find_prev_data_dirs(self, new_variable_list: List[Tuple[str, Any]]) -> List[Path]:
    def find_prev_data_dirs(self, new_variable_list: list[tuple[str, Any]]) -> list[Path]:
        """finds the previous simulation data that this new config should start from

        Parameters
        ----------
        new_variable_list : List[Tuple[str, Any]]
        new_variable_list : list[tuple[str, Any]]
            as yielded by required_simulations

        Returns
@@ -278,7 +277,7 @@ class ContinuationParamSequence(ParamSequence):
        ValueError
            no data folder found
        """
        new_target = set(utils.format_variable_list(new_variable_list).split()[2:])
        new_target = set(parameter.format_variable_list(new_variable_list).split()[2:])
        path_dic = defaultdict(list)
        max_in_common = 0
        for data_dir in self.prev_sim_dir.glob("id*"):
@@ -315,7 +314,7 @@ class RecoveryParamSequence(ParamSequence):
        self.prev_variable_lists = [
            (
                set(variable_list[1:]),
                self.prev_sim_dir / utils.format_variable_list(variable_list),
                self.prev_sim_dir / parameter.format_variable_list(variable_list),
            )
            for variable_list, _ in required_simulations(init_config)
        ]
@@ -330,12 +329,12 @@ class RecoveryParamSequence(ParamSequence):
        )
        self.update_num_sim(self.num_sim * additional_sims_factor)
        not_started = self.num_sim
        sub_folders = io.get_data_dirs(io.get_sim_dir(self.id))
        sub_folders = utils.get_data_dirs(utils.get_sim_dir(self.id))

        for sub_folder in utils.PBars(
            sub_folders, "Initial recovery", head_kwargs=dict(unit="sim")
        ):
            num_left = io.num_left_to_propagate(sub_folder, self.config.z_num)
            num_left = utils.num_left_to_propagate(sub_folder, self.config.z_num)
            if num_left == 0:
                self.num_sim -= 1
            self.num_steps += num_left
@@ -344,26 +343,26 @@ class RecoveryParamSequence(ParamSequence):
        self.num_steps += not_started * self.config.z_num
        self.single_sim = self.num_sim == 1

    def __iter__(self) -> Iterator[Tuple[List[Tuple[str, Any]], Parameters]]:
    def __iter__(self) -> Iterator[tuple[list[tuple[str, Any]], Parameters]]:
        for variable_list, params in required_simulations(self.config):

            data_dir = io.get_sim_dir(self.id) / utils.format_variable_list(variable_list)
            data_dir = utils.get_sim_dir(self.id) / parameter.format_variable_list(variable_list)

            if not data_dir.is_dir() or io.find_last_spectrum_num(data_dir) == 0:
                if (prev_data_dir := self.find_prev_data_dir(variable_list)) is not None:
            if not data_dir.is_dir() or utils.find_last_spectrum_num(data_dir) == 0:
                if (prev_data_dir := self.find_prev_data_dirs(variable_list)) is not None:
                    params.prev_data_dir = str(prev_data_dir)
                    yield variable_list, params
            elif io.num_left_to_propagate(data_dir, self.config.z_num) != 0:
            elif utils.num_left_to_propagate(data_dir, self.config.z_num) != 0:
                yield variable_list, params  # needs to rethink recovery procedure
            else:
                continue

    def find_prev_data_dirs(self, new_variable_list: List[Tuple[str, Any]]) -> List[Path]:
    def find_prev_data_dirs(self, new_variable_list: list[tuple[str, Any]]) -> list[Path]:
        """finds the previous simulation data that this new config should start from

        Parameters
        ----------
        new_variable_list : List[Tuple[str, Any]]
        new_variable_list : list[tuple[str, Any]]
            as yielded by required_simulations

        Returns
@@ -411,194 +410,3 @@ def validate_config_sequence(*configs: os.PathLike) -> tuple[str, int]:
        new_conf = config
        previous = Config.from_bare(override_config(new_conf, previous))
    return previous.name, count_variations(*configs)


# def wspace(t, t_num=0):
#     """frequency array such that x(t) <-> np.fft(x)(w)
#     Parameters
#     ----------
#     t : float or array
#         float : total width of the time window
#         array : time array
#     t_num : int
#         if t is a float, specifies the number of points
#     Returns
#     ----------
#     w : array
#         linspace of frequencies corresponding to t
#     """
#     if isinstance(t, (np.ndarray, list, tuple)):
#         dt = t[1] - t[0]
#         t_num = len(t)
#         t = t[-1] - t[0] + dt
#     else:
#         dt = t / t_num
#     w = 2 * pi * np.arange(t_num) / t
#     w = np.where(w >= pi / dt, w - 2 * pi / dt, w)
#     return w


# def tspace(time_window=None, t_num=None, dt=None):
#     """returns a time array centered on 0
#     Parameters
#     ----------
#     time_window : float
#         total time spanned
#     t_num : int
#         number of points
#     dt : float
#         time resolution

#     at least 2 arguments must be given. They are prioritized as follows:
#     t_num > time_window > dt

#     Returns
#     -------
#     t : array
#         a linearly spaced time array
#     Raises
#     ------
#     TypeError
#         missing at least 1 argument
#     """
#     if t_num is not None:
#         if isinstance(time_window, (float, int)):
#             return np.linspace(-time_window / 2, time_window / 2, int(t_num))
#         elif isinstance(dt, (float, int)):
#             time_window = (t_num - 1) * dt
#             return np.linspace(-time_window / 2, time_window / 2, t_num)
#     elif isinstance(time_window, (float, int)) and isinstance(dt, (float, int)):
#         t_num = int(time_window / dt) + 1
#         return np.linspace(-time_window / 2, time_window / 2, t_num)
#     else:
#         raise TypeError("not enough parameters to determine time vector")


# def recover_params(params: Parameters, data_folder: Path) -> Parameters:
#     try:
#         prev = Parameters.load(data_folder / "params.toml")
#     except FileNotFoundError:
#         prev = Parameters()
#     for k, v in filter(lambda el: el[1] is not None, vars(prev).items()):
#         if getattr(params, k) is None:
#             setattr(params, k, v)
#     num, last_spectrum = io.load_last_spectrum(data_folder)
#     params.spec_0 = last_spectrum
#     params.field_0 = np.fft.ifft(last_spectrum)
#     params.recovery_last_stored = num
#     params.cons_qty = np.load(data_folder / "cons_qty.npy")
#     return params


# def build_sim_grid(
#     length: float,
#     z_num: int,
#     wavelength: float,
#     deg: int,
#     time_window: float = None,
#     t_num: int = None,
#     dt: float = None,
# ) -> tuple[
#     np.ndarray, np.ndarray, float, int, float, np.ndarray, float, np.ndarray, np.ndarray, np.ndarray
# ]:
#     """computes a bunch of values that relate to the simulation grid

#     Parameters
#     ----------
#     length : float
#         length of the fiber in m
#     z_num : int
#         number of spatial points
#     wavelength : float
#         pump wavelength in m
#     deg : int
#         dispersion interpolation degree
#     time_window : float, optional
#         total width of the temporal grid in s, by default None
#     t_num : int, optional
#         number of temporal grid points, by default None
#     dt : float, optional
#         spacing of the temporal grid in s, by default None

#     Returns
#     -------
#     z_targets : np.ndarray, shape (z_num, )
#         spatial points in m
#     t : np.ndarray, shape (t_num, )
#         temporal points in s
#     time_window : float
#         total width of the temporal grid in s
#     t_num : int
#         number of temporal grid points
#     dt : float
#         spacing of the temporal grid in s
#     w_c : np.ndarray, shape (t_num, )
#         centered angular frequencies in rad/s where 0 is the pump frequency
#     w0 : float
#         pump angular frequency
#     w : np.ndarray, shape (t_num, )
#         actual angular frequency grid in rad/s
#     w_power_fact : np.ndarray, shape (deg, t_num)
#         set of all the necessary powers of w_c
#     l : np.ndarray, shape (t_num, )
#         wavelengths in m
#     """
#     t = tspace(time_window, t_num, dt)

#     time_window = t.max() - t.min()
#     dt = t[1] - t[0]
#     t_num = len(t)
#     z_targets = np.linspace(0, length, z_num)
#     w_c, w0, w, w_power_fact = update_frequency_domain(t, wavelength, deg)
#     l = units.To.m(w)
#     return z_targets, t, time_window, t_num, dt, w_c, w0, w, w_power_fact, l


# def build_sim_grid_in_place(params: BareParams):
#     """similar to calling build_sim_grid, but sets the attributes in place"""
#     (
#         params.z_targets,
#         params.t,
#         params.time_window,
#         params.t_num,
#         params.dt,
#         params.w_c,
#         params.w0,
#         params.w,
#         params.w_power_fact,
#         params.l,
#     ) = build_sim_grid(
#         params.length,
#         params.z_num,
#         params.wavelength,
#         params.interpolation_degree,
#         params.time_window,
#         params.t_num,
#         params.dt,
#     )


# def update_frequency_domain(
#     t: np.ndarray, wavelength: float, deg: int
# ) -> Tuple[np.ndarray, float, np.ndarray, np.ndarray]:
#     """updates the frequency grid

#     Parameters
#     ----------
#     t : np.ndarray
#         time array
#     wavelength : float
#         wavelength
#     deg : int
#         interpolation degree of the dispersion

#     Returns
#     -------
#     Tuple[np.ndarray, float, np.ndarray, np.ndarray]
#         w_c, w0, w, w_power_fact
#     """
#     w_c = wspace(t)
#     w0 = units.m(wavelength)
#     w = w_c + w0
#     w_power_fact = np.array([power_fact(w_c, k) for k in range(2, deg + 3)])
#     return w_c, w0, w, w_power_fact

@@ -1,409 +0,0 @@
from __future__ import annotations

import itertools
import os
import shutil
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Sequence, Tuple

import numpy as np
import pkg_resources as pkg
import toml
from tqdm.std import Bar

from scgenerator.utils.parameter import BareConfig

from . import env
from .const import PARAM_FN, PARAM_SEPARATOR, SPEC1_FN, SPECN_FN, Z_FN, __version__
from .env import TMP_FOLDER_KEY_BASE
from .logger import get_logger

PathTree = List[Tuple[Path, ...]]


class Paths:
    _data_files = [
        "silica.toml",
        "gas.toml",
        "hr_t.npz",
        "submit_job_template.txt",
        "start_worker.sh",
        "start_head.sh",
    ]

    paths = {
        f.split(".")[0]: os.path.abspath(
            pkg.resource_filename("scgenerator", os.path.join("data", f))
        )
        for f in _data_files
    }

    @classmethod
    def get(cls, key):
        if key not in cls.paths:
            if os.path.exists("paths.toml"):
                with open("paths.toml") as file:
                    paths_dico = toml.load(file)
                for k, v in paths_dico.items():
                    cls.paths[k] = v
        if key not in cls.paths:
            get_logger(__name__).info(
                f"{key} was not found in path index, returning current working directory."
            )
            cls.paths[key] = os.getcwd()

        return cls.paths[key]

    @classmethod
    def gets(cls, key):
        """returns the specified file as a string"""
        with open(cls.get(key)) as file:
            return file.read()

    @classmethod
    def plot(cls, name):
        """returns the path to the specified plot. Used to save new plots
        example
        ---------
        fig.savefig(Paths.plot("figure5.pdf"))
        """
        return os.path.join(cls.get("plots"), name)


def conform_toml_path(path: os.PathLike) -> Path:
    path = Path(path)
    if not path.name.lower().endswith(".toml"):
        path = path.parent / (path.name + ".toml")
    return path


def load_toml(path: os.PathLike):
    """returns a dictionary parsed from the specified toml file"""
    path = conform_toml_path(path)
    with open(path, mode="r") as file:
        dico = toml.load(file)
    dico.setdefault("variable", {})
    for key in {"simulation", "fiber", "gas", "pulse"} & dico.keys():
        section = dico.pop(key, {})
        dico["variable"].update(section.pop("variable", {}))
        dico.update(section)
    if len(dico["variable"]) == 0:
        dico.pop("variable")
    return dico


def save_toml(path: os.PathLike, dico):
    """saves a dictionary into a toml file"""
    path = conform_toml_path(path)
    with open(path, mode="w") as file:
        toml.dump(dico, file)
    return dico


def save_parameters(
    params: dict[str, Any], destination_dir: Path, file_name: str = "params.toml"
) -> Path:
    """saves a parameter dictionary. Note that it does remove some entries, particularly
    those that take a lot of space ("t", "w", ...)

    Parameters
    ----------
    params : Dict[str, Any]
        dictionary to save
    destination_dir : Path
        destination directory

    Returns
    -------
    Path
        path to the newly created parameter file
    """
    file_path = destination_dir / file_name

    file_path.parent.mkdir(exist_ok=True)

    # save toml of the simulation
    with open(file_path, "w") as file:
        toml.dump(params, file, encoder=toml.TomlNumpyEncoder())

    return file_path


def load_material_dico(name: str) -> dict[str, Any]:
    """loads a material dictionary
    Parameters
    ----------
    name : str
        name of the material
    Returns
    ----------
    material_dico : dict
    """
    if name == "silica":
        return toml.loads(Paths.gets("silica"))
    else:
        return toml.loads(Paths.gets("gas"))[name]


def get_data_dirs(sim_dir: Path) -> List[Path]:
    """returns a list of absolute paths corresponding to a particular run

    Parameters
    ----------
    sim_dir : Path
        path to the directory containing the initial config file and the spectra sub folders

    Returns
    -------
    List[Path]
        paths to the sub folders
    """

    return [p.resolve() for p in sim_dir.glob("*") if p.is_dir()]


def update_appended_params(source: Path, destination: Path, z: Sequence):
    z_num = len(z)
    params = load_toml(source)
    if "simulation" in params:
        params["simulation"]["z_num"] = z_num
        params["fiber"]["length"] = float(z[-1] - z[0])
    else:
        params["z_num"] = z_num
        params["length"] = float(z[-1] - z[0])
    save_toml(destination, params)


def build_path_trees(sim_dir: Path) -> List[PathTree]:
    sim_dir = sim_dir.resolve()
    path_branches: List[Tuple[Path, ...]] = []
    to_check = list(sim_dir.glob("id*num*"))
    with utils.PBars(len(to_check), desc="Building path trees") as pbar:
        for branch in map(build_path_branch, to_check):
            if branch is not None:
                path_branches.append(branch)
            pbar.update()
    path_trees = group_path_branches(path_branches)
    return path_trees


def build_path_branch(data_dir: Path) -> Tuple[Path, ...]:
    if not data_dir.is_dir():
        return None
    path_branch = [data_dir]
    while (prev_sim_path := load_toml(path_branch[-1] / PARAM_FN).get("prev_data_dir")) is not None:
        p = Path(prev_sim_path).resolve()
        if not p.exists():
            p = Path(*p.parts[-2:]).resolve()
        path_branch.append(p)
    return tuple(reversed(path_branch))


def group_path_branches(path_branches: List[Tuple[Path, ...]]) -> List[PathTree]:
    """groups path lists

    [
        ("a/id 0 wavelength 100 num 0", "b/id 0 wavelength 100 num 0"),
        ("a/id 2 wavelength 100 num 1", "b/id 2 wavelength 100 num 1"),
        ("a/id 1 wavelength 200 num 0", "b/id 1 wavelength 200 num 0"),
        ("a/id 3 wavelength 200 num 1", "b/id 3 wavelength 200 num 1")
    ]
    ->
    [
        (
            ("a/id 0 wavelength 100 num 0", "a/id 2 wavelength 100 num 1"),
            ("b/id 0 wavelength 100 num 0", "b/id 2 wavelength 100 num 1"),
        )
        (
            ("a/id 1 wavelength 200 num 0", "a/id 3 wavelength 200 num 1"),
            ("b/id 1 wavelength 200 num 0", "b/id 3 wavelength 200 num 1"),
        )
    ]


    Parameters
    ----------
    path_branches : List[Tuple[Path, ...]]
        each element of the list is a path to a folder containing data of one simulation

    Returns
    -------
    List[PathTree]
        list of PathTrees to be used in merge
    """
    sort_key = lambda el: el[0]

    size = len(path_branches[0])
    out_trees_map: Dict[str, Dict[int, Dict[int, Path]]] = {}
    for branch in path_branches:
        b_id = utils.branch_id(branch)
        out_trees_map.setdefault(b_id, {i: {} for i in range(size)})
        for sim_part, data_dir in enumerate(branch):
            *_, num = data_dir.name.split()
            out_trees_map[b_id][sim_part][int(num)] = data_dir

    return [
        tuple(
            tuple(w for _, w in sorted(v.items(), key=sort_key))
            for __, v in sorted(d.items(), key=sort_key)
        )
        for d in out_trees_map.values()
    ]


def merge_path_tree(
    path_tree: PathTree, destination: Path, z_callback: Callable[[int], None] = None
):
    """given a path tree, copies the files into the right location

    Parameters
    ----------
    path_tree : PathTree
        element of the list returned by group_path_branches
    destination : Path
        dir where to save the data
    """
    z_arr: List[float] = []

    destination.mkdir(exist_ok=True)

    for i, (z, merged_spectra) in enumerate(merge_spectra(path_tree)):
        z_arr.append(z)
        spec_out_name = SPECN_FN.format(i)
        np.save(destination / spec_out_name, merged_spectra)
        if z_callback is not None:
            z_callback(i)
    d = np.diff(z_arr)
    d[d < 0] = 0
    z_arr = np.concatenate(([z_arr[0]], np.cumsum(d)))
    np.save(destination / Z_FN, z_arr)
    update_appended_params(path_tree[-1][0] / PARAM_FN, destination / PARAM_FN, z_arr)


def merge_spectra(
    path_tree: PathTree,
) -> Generator[Tuple[float, np.ndarray], None, None]:
    for same_sim_paths in path_tree:
        z_arr = np.load(same_sim_paths[0] / Z_FN)
        for i, z in enumerate(z_arr):
            spectra: List[np.ndarray] = []
            for data_dir in same_sim_paths:
                spec = np.load(data_dir / SPEC1_FN.format(i))
                spectra.append(spec)
            yield z, np.atleast_2d(spectra)


def merge(destination: os.PathLike, path_trees: List[PathTree] = None):

    destination = ensure_folder(Path(destination))

    z_num = 0
    prev_z_num = 0

    for i, sim_dir in enumerate(sim_dirs(path_trees)):
        conf = sim_dir / "initial_config.toml"
        shutil.copy(
            conf,
            destination / f"initial_config_{i}.toml",
        )
        prev_z_num = load_toml(conf).get("z_num", prev_z_num)
        z_num += prev_z_num

    pbars = utils.PBars(
        len(path_trees) * z_num, "Merging", 1, worker_kwargs=dict(total=z_num, desc="current pos")
    )
    for path_tree in path_trees:
        pbars.reset(1)
        iden = PARAM_SEPARATOR.join(path_tree[-1][0].name.split()[2:-2])
        merge_path_tree(path_tree, destination / iden, z_callback=lambda i: pbars.update(1))


def sim_dirs(path_trees: List[PathTree]) -> Generator[Path, None, None]:
    for p in path_trees[0]:
        yield p[0].parent


def get_sim_dir(task_id: int, path_if_new: Path = None) -> Path:
    if path_if_new is None:
        path_if_new = Path("scgenerator data")
    tmp = env.data_folder(task_id)
    if tmp is None:
        tmp = ensure_folder(path_if_new)
        os.environ[TMP_FOLDER_KEY_BASE + str(task_id)] = str(tmp)
    tmp = Path(tmp).resolve()
    if not tmp.exists():
        tmp.mkdir()
    return tmp


def set_data_folder(task_id: int, path: os.PathLike):
    """stores the path to an existing data folder in the environment

    Parameters
    ----------
    task_id : int
        id uniquely identifying the session
    path : str
        path to the root of the data folder
    """
    idstr = str(int(task_id))
    os.environ[TMP_FOLDER_KEY_BASE + idstr] = str(path)


def save_data(data: np.ndarray, data_dir: Path, file_name: str):
    """saves a numpy array to disk

    Parameters
    ----------
    data : np.ndarray
        data to save
    data_dir : Path
        directory in which to save the data
    file_name : str
        file name
    """
    path = data_dir / file_name
    np.save(path, data)
    get_logger(__name__).debug(f"saved data in {path}")
    return


def ensure_folder(path: Path, prevent_overwrite: bool = True) -> Path:
    """ensures a folder exists and doesn't overwrite anything if required

    Parameters
    ----------
    path : Path
        desired path
    prevent_overwrite : bool, optional
        whether to create a new directory when one already exists, by default True

    Returns
    -------
    Path
        final path
    """

    path = path.resolve()

    # is path root ?
    if len(path.parts) < 2:
        return path

    # is a part of path an existing *file* ?
    parts = path.parts
    path = Path(path.root)
    for part in parts:
        if path.is_file():
            path = ensure_folder(path, prevent_overwrite=False)
        path /= part

    folder_name = path.name

    for i in itertools.count():
        if not path.is_file() and (not prevent_overwrite or not path.is_dir()):
            path.mkdir(exist_ok=True)
            return path
        path = path.parent / (folder_name + f"_{i}")

@@ -8,8 +8,9 @@ from typing import TypeVar
import numpy as np
from scipy.optimize import minimize_scalar

from .. import math, io
from .. import math
from . import fiber, materials, units, pulse
from .. import utils

T = TypeVar("T")

@@ -53,7 +54,7 @@ def material_dispersion(

    order = np.argsort(w)

    material_dico = io.load_material_dico(material)
    material_dico = utils.load_material_dico(material)
    if ideal:
        n_gas_2 = materials.sellmeier(wavelengths, material_dico, pressure, temperature) + 1
    else:

@@ -8,7 +8,7 @@ from scipy.interpolate import interp1d

from ..logger import get_logger

from .. import io
from .. import utils
from ..math import abs2, argclosest, power_fact, u_nm
from ..utils.cache import np_cache
from . import materials as mat
@@ -247,7 +247,7 @@ def n_eff_hasan(

    n_eff_2 = n_gas_2 - (u * wl_for_disp / (pipi * R_eff)) ** 2

    chi_sil = mat.sellmeier(wl_for_disp, io.load_material_dico("silica"))
    chi_sil = mat.sellmeier(wl_for_disp, utils.load_material_dico("silica"))

    with np.errstate(divide="ignore", invalid="ignore"):
        for m, strength in enumerate(capillary_resonance_strengths):
@@ -670,7 +670,7 @@ def n_eff_pcf(wl_for_disp: np.ndarray, pitch: float, pitch_ratio: float) -> np.n
    n_eff2 = (wl_for_disp * W / (pi2a)) ** 2 + n_FSM2
    n_eff = np.sqrt(n_eff2)

    material_dico = io.load_material_dico("silica")
    material_dico = utils.load_material_dico("silica")
    chi_mat = mat.sellmeier(wl_for_disp, material_dico)
    return n_eff + np.sqrt(chi_mat + 1)

@@ -882,7 +882,7 @@ def delayed_raman_t(t: np.ndarray, raman_type: str) -> np.ndarray:

    elif raman_type == "measured":
        try:
            path = io.Paths.get("hr_t")
            path = utils.Paths.get("hr_t")
            loaded = np.load(path)
        except FileNotFoundError:
            print(
@@ -1090,7 +1090,7 @@ def capillary_loss(
    """
    alpha = np.zeros_like(wl_for_disp)
    mask = wl_for_disp > 0
    chi_silica = mat.sellmeier(wl_for_disp[mask], io.load_material_dico("silica"))
    chi_silica = mat.sellmeier(wl_for_disp[mask], utils.load_material_dico("silica"))
    nu_n = 0.5 * (chi_silica + 2) / np.sqrt(chi_silica)
    alpha[mask] = nu_n * (u_nm(*he_mode) * wl_for_disp[mask] / pipi) ** 2 * core_radius ** -3
    return alpha

@@ -5,14 +5,14 @@ from scipy.integrate import cumulative_trapezoid

from ..logger import get_logger
from . import units
from .. import io
from .. import utils
from .units import NA, c, kB, me, e, hbar


def n_gas_2(
    wl_for_disp: np.ndarray, gas: str, pressure: float, temperature: float, ideal_gas: bool
):
    material_dico = io.load_material_dico(gas)
    material_dico = utils.load_material_dico(gas)

    if ideal_gas:
        n_gas_2 = sellmeier(wl_for_disp, material_dico, pressure, temperature) + 1
@@ -103,7 +103,7 @@ def sellmeier(lambda_, material_dico, pressure=None, temperature=None):
    Parameters
    ----------
    lambda_ : wl vector over which to compute the refractive index
    material_dico : material dictionary as explained in scgenerator.io.load_material_dico
    material_dico : material dictionary as explained in scgenerator.utils.load_material_dico
    pressure : pressure in mbar if the material is a gas. Can be a constant or a tuple if a pressure gradient is considered
    temperature : temperature of the gas in Kelvin
    Returns
@@ -150,7 +150,7 @@ def delta_gas(w, material_dico):
    Parameters
    ----------
    w : angular frequency array
    material_dico : material dictionary as explained in scgenerator.io.load_material_dico
    material_dico : material dictionary as explained in scgenerator.utils.load_material_dico
    Returns
    ----------
        delta_t

@@ -11,25 +11,23 @@ n is the number of spectra at the same z position and nt is the size of the time

import itertools
import os
from dataclasses import astuple, dataclass, fields
from pathlib import Path
from typing import Literal, Tuple, TypeVar
from collections import namedtuple
from dataclasses import dataclass, astuple

import matplotlib.pyplot as plt
import numpy as np
from numpy import pi
from numpy.fft import fft, fftshift, ifft
from scipy import optimize
from scipy.interpolate import UnivariateSpline
from scipy.optimize import minimize_scalar
from scipy.optimize.optimize import OptimizeResult

from .. import io
from scgenerator import utils

from ..defaults import default_plotting
from ..logger import get_logger
from ..math import *
from ..plotting import plot_setup
from . import units

c = 299792458.0
@@ -317,24 +315,18 @@ def L_sol(L_D):
    return pi / 2 * L_D


def load_previous_spectrum(prev_data_dir: str) -> np.ndarray:
    data_dir = Path(prev_data_dir)
    num = utils.find_last_spectrum_num(data_dir)
    return np.load(data_dir / SPEC1_FN.format(num))


def load_field_file(
def load_and_adjust_field_file(
    field_file: str,
    t: np.ndarray,
    peak_power: float,
    energy: float,
    intensity_noise: float,
    noise_correlation: float,
    energy: float = None,
    peak_power: float = None,
) -> np.ndarray:
    field_data = np.load(field_file)
    field_interp = interp1d(
        field_data["time"], field_data["field"], bounds_error=False, fill_value=(0, 0)
    )
    field_0 = field_interp(t)
    field_0 = load_field_file(field_file, t)
    if energy is not None:
        curr_energy = np.trapz(abs2(field_0), t)
        field_0 *=

    field_0 = field_0 * modify_field_ratio(
        t, field_0, peak_power, energy, intensity_noise, noise_correlation
@@ -343,6 +335,15 @@ def load_field_file(
    return field_0, peak_power, energy, width


def load_field_file(field_file: str, t: np.ndarray) -> np.ndarray:
    field_data = np.load(field_file)
    field_interp = interp1d(
        field_data["time"], field_data["field"], bounds_error=False, fill_value=(0, 0)
    )
    field_0 = field_interp(t)
    return field_0
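
The dangling "field_0 *=" a few lines up is truncated in the commit itself; the custom-field import is mid-fix. Judging by the curr_energy line just before it, a plausible completion (a sketch only, not the author's code) rescales the loaded field to the requested energy:

    import numpy as np

    def rescale_to_energy(field_0: np.ndarray, t: np.ndarray, energy: float) -> np.ndarray:
        # pulse energy is the integral of |E(t)|^2 over t; scale the field so it matches
        curr_energy = np.trapz(np.abs(field_0) ** 2, t)
        return field_0 * np.sqrt(energy / curr_energy)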


def correct_wavelength(init_wavelength: float, w_c: np.ndarray, field_0: np.ndarray) -> float:
    """
    finds a new wavelength parameter such that the maximum of the spectrum corresponding
@@ -908,7 +909,7 @@ def _detailed_find_lobe_limits(
        # whether the final measurement is good or not

        out_path, fig, ax = (
            plot_setup(out_path=f"measurement_errors_plots/it_{iterations}_{debug}")
            (Path(f"measurement_errors_plots/it_{iterations}_{debug}"), *plt.subplots())
            if debug != ""
            else (None, None, None)
        )
@@ -1055,6 +1056,10 @@ def measure_field(t: np.ndarray, field: np.ndarray) -> Tuple[float, float, float
    return fwhm, peak_power, energy


def measure_custom_field(field_file: str, t: np.ndarray) -> tuple[float, float, float]:
    return measure_field(t, load_field_file(field_file, t))


def remove_2nd_order_dispersion(
    spectrum: T, w_c: np.ndarray, beta2: float, max_z: float = -100.0
) -> tuple[T, OptimizeResult]:

@@ -3,12 +3,12 @@ import os
import random
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Tuple, Type, Union
from typing import Type

import numpy as np

from .. import env, initialize, io, utils
from ..utils import Parameters, BareConfig
from .. import env, initialize, utils
from ..utils.parameter import Parameters, BareConfig
from ..const import PARAM_SEPARATOR
from ..errors import IncompleteDataFolderError
from ..logger import get_logger
@@ -55,7 +55,7 @@ class RK4IP:
        self.job_identifier = job_identifier
        self.id = task_id

        self.sim_dir = io.get_sim_dir(self.id)
        self.sim_dir = utils.get_sim_dir(self.id)
        self.sim_dir.mkdir(exist_ok=True)
        self.data_dir = self.sim_dir / self.job_identifier

@@ -68,7 +68,7 @@ class RK4IP:
        self.dw = self.w[1] - self.w[0]
        self.w0 = params.w0
        self.w_power_fact = params.w_power_fact
        self.alpha = params.alpha
        self.alpha = params.alpha_arr
        self.spec_0 = np.sqrt(params.input_transmission) * params.spec_0
        self.z_targets = params.z_targets
        self.z_final = params.length
@@ -81,7 +81,7 @@ class RK4IP:
        self.raman_type = params.raman_type
        self.hr_w = params.hr_w
        self.adapt_step_size = params.adapt_step_size
        self.error_ok = params.tolerated_error
        self.error_ok = params.tolerated_error if self.adapt_step_size else params.step_size
        self.dynamic_dispersion = params.dynamic_dispersion
        self.starting_num = params.recovery_last_stored

@@ -178,7 +178,7 @@ class RK4IP:
        name : str
            file name
        """
        io.save_data(data, self.data_dir, name)
        utils.save_data(data, self.data_dir, name)

    def run(self):

@@ -241,7 +241,7 @@ class RK4IP:

    def take_step(
        self, step: int, h_next_step: float, current_spectrum: np.ndarray
    ) -> Tuple[float, float, np.ndarray]:
    ) -> tuple[float, float, np.ndarray]:
        """computes a new spectrum, whilst adjusting step size if required, until the error estimation
        validates the new spectrum

@@ -385,8 +385,8 @@ class Simulations:
    New Simulations child classes can be written and must implement the following
    """

    simulation_methods: List[Tuple[Type["Simulations"], int]] = []
    simulation_methods_dict: Dict[str, Type["Simulations"]] = dict()
    simulation_methods: list[tuple[Type["Simulations"], int]] = []
    simulation_methods_dict: dict[str, Type["Simulations"]] = dict()

    def __init_subclass__(cls, priority=0, **kwargs):
        Simulations.simulation_methods.append((cls, priority))
@@ -437,16 +437,16 @@ class Simulations:
        """
        if not self.is_available():
            raise RuntimeError(f"{self.__class__} is currently not available")
        self.logger = io.get_logger(__name__)
        self.logger = get_logger(__name__)
        self.id = int(task_id)

        self.update(param_seq)

        self.name = self.param_seq.name
        self.sim_dir = io.get_sim_dir(
        self.sim_dir = utils.get_sim_dir(
            self.id, path_if_new=Path(self.name + PARAM_SEPARATOR + "tmp")
        )
        io.save_parameters(
        utils.save_parameters(
            self.param_seq.config.prepare_for_dump(), self.sim_dir, file_name="initial_config.toml"
        )

@@ -455,7 +455,9 @@ class Simulations:
    @property
    def finished_and_complete(self):
        try:
            io.check_data_integrity(io.get_data_dirs(self.sim_dir), self.param_seq.config.z_num)
            utils.check_data_integrity(
                utils.get_data_dirs(self.sim_dir), self.param_seq.config.z_num
            )
            return True
        except IncompleteDataFolderError:
            return False
@@ -470,7 +472,7 @@ class Simulations:
    def _run_available(self):
        for variable, params in self.param_seq:
            v_list_str = utils.format_variable_list(variable)
            io.save_parameters(params.prepare_for_dump(), self.sim_dir / v_list_str)
            utils.save_parameters(params.prepare_for_dump(), self.sim_dir / v_list_str)

            self.new_sim(v_list_str, params)
        self.finish()
@@ -582,7 +584,7 @@ class MultiProcSimulations(Simulations, priority=1):
        p_queue: multiprocessing.Queue,
    ):
        while True:
            raw_data: Tuple[List[tuple], Parameters] = queue.get()
            raw_data: tuple[list[tuple], Parameters] = queue.get()
            if raw_data == 0:
                queue.task_done()
                return
@@ -695,17 +697,17 @@ def run_simulation_sequence(
        sim = new_simulation(config, prev, method)
        sim.run()
        prev = sim.sim_dir
    path_trees = io.build_path_trees(sim.sim_dir)
    path_trees = utils.build_path_trees(sim.sim_dir)

    final_name = env.get(env.OUTPUT_PATH)
    if final_name is None:
        final_name = config.name

    io.merge(final_name, path_trees)
    utils.merge(final_name, path_trees)


def new_simulation(
    config: utils.BareConfig,
    config: BareConfig,
    prev_sim_dir=None,
    method: Type[Simulations] = None,
) -> Simulations:
@@ -729,8 +731,8 @@ def new_simulation(
def resume_simulations(sim_dir: Path, method: Type[Simulations] = None) -> Simulations:

    task_id = random.randint(1e9, 1e12)
    config = io.load_toml(sim_dir / "initial_config.toml")
    io.set_data_folder(task_id, sim_dir)
    config = utils.load_toml(sim_dir / "initial_config.toml")
    utils.set_data_folder(task_id, sim_dir)
    param_seq = initialize.RecoveryParamSequence(config, task_id)

    return Simulations.new(param_seq, task_id, method)
@@ -2,10 +2,9 @@
# For example, nm(X) means "I give the number X in nm, figure out the ang. freq."
# to be used especially when giving plotting ranges : (400, 1400, nm), (-4, 8, ps), ...

from typing import Callable, TypeVar, Union
from dataclasses import dataclass
from typing import Callable, TypeVar, Union

from ..utils import parameter
import numpy as np
from numpy import pi

@@ -181,17 +180,6 @@ def is_unit(name, value):
    raise TypeError("invalid unit specified")


@dataclass
class PlotRange:
    left: float = parameter.Parameter(parameter.type_checker(int, float))
    right: float = parameter.Parameter(parameter.type_checker(int, float))
    unit: Callable[[float], float] = parameter.Parameter(is_unit, converter=get_unit)
    conserved_quantity: bool = parameter.Parameter(parameter.boolean, default=True)

    def __str__(self):
        return f"{self.left:.1f}-{self.right:.1f} {self.unit.__name__}"


def beta2_coef(beta2_coefficients):
    fac = 1e27
    out = np.zeros_like(beta2_coefficients)
@@ -231,54 +219,6 @@ def standardize_dictionary(dico):
    return dico


def sort_axis(axis, plt_range: PlotRange) -> tuple[np.ndarray, np.ndarray, tuple[float, float]]:
    """
    given an axis, returns this axis cropped according to the given range, converted and sorted

    Parameters
    ----------
    axis : 1D array containing the original axis (usually the w or t array)
    plt_range : tuple (min, max, conversion_function) used to crop the axis

    Returns
    -------
    cropped : the axis cropped, converted and sorted
    indices : indices to use to slice and sort other arrays in the same fashion
    extent : tuple with min and max of cropped

    Example
    -------
    w = np.append(np.linspace(0, -10, 20), np.linspace(0, 10, 20))
    t = np.linspace(-10, 10, 400)
    W, T = np.meshgrid(w, t)
    y = np.exp(-W**2 - T**2)

    # Define ranges
    rw = (-4, 4, s)
    rt = (-2, 6, s)

    w, cw, _ = sort_axis(w, rw)
    t, ct, _ = sort_axis(t, rt)

    # slice y according to the given ranges
    y = y[ct][:, cw]
    """
    if isinstance(plt_range, tuple):
        plt_range = PlotRange(*plt_range)
    r = np.array((plt_range.left, plt_range.right), dtype="float")

    indices = np.arange(len(axis))[
        (axis <= np.max(plt_range.unit(r))) & (axis >= np.min(plt_range.unit(r)))
    ]
    cropped = axis[indices]
    order = np.argsort(plt_range.unit.inv(cropped))
    indices = indices[order]
    cropped = cropped[order]
    out_ax = plt_range.unit.inv(cropped)

    return out_ax, indices, (out_ax[0], out_ax[-1])


def to_WL(spectrum: np.ndarray, lambda_: np.ndarray) -> np.ndarray:
    """rescales the spectrum because of uneven binning when going from frequency to wavelength

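Note: the body of to_WL is cut off by the following hunk. For reference, a sketch of the usual Jacobian rescaling such a function performs (an assumption about the implementation, not a verbatim copy): since E(lambda) dlambda = E(omega) domega and |domega/dlambda| = 2*pi*c / lambda**2, the spectrum gets multiplied by that factor.

import numpy as np

def to_WL_sketch(spectrum: np.ndarray, lambda_: np.ndarray) -> np.ndarray:
    c = 299792458.0  # speed of light in m/s
    return 2 * np.pi * c / lambda_**2 * spectrum  # Jacobian of the omega -> lambda change of variable
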
@@ -1,29 +1,22 @@
import os
from pathlib import Path
import re
from typing import Any, Callable, Dict, Literal, Optional, Tuple, Union
from PIL.Image import new
from typing import Any, Callable, Literal, Optional, Union

import matplotlib.gridspec as gs
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import ListedColormap
from numpy.core.fromnumeric import mean
from scipy.interpolate import UnivariateSpline
from scipy.interpolate.interpolate import interp1d
from tqdm import utils

from scgenerator.const import PARAM_SEPARATOR

from .logger import get_logger

from . import io, math
from . import math
from .const import PARAM_SEPARATOR
from .defaults import default_plotting as defaults
from .math import abs2, make_uniform_1D, span
from .math import abs2, span
from .physics import pulse, units
from .utils.parameter import BareConfig, Parameters
from .utils.parameter import Parameters, PlotRange, sort_axis

RangeType = Tuple[float, float, Union[str, Callable]]
RangeType = tuple[float, float, Union[str, Callable]]
NO_LIM = object()


@@ -52,9 +45,9 @@ def get_extent(x, y, facx=1, facy=1):
def plot_setup(
    out_path: Path,
    file_type: str = "png",
    figsize: Tuple[float, float] = defaults["figsize"],
    figsize: tuple[float, float] = defaults["figsize"],
    mode: Literal["default", "coherence", "coherence_T"] = "default",
) -> Tuple[Path, plt.Figure, Union[plt.Axes, Tuple[plt.Axes]]]:
) -> tuple[Path, plt.Figure, Union[plt.Axes, tuple[plt.Axes]]]:
    out_path = defaults["name"] if out_path is None else out_path
    out_path = Path(out_path)
    plot_name = out_path.name.replace(f".{file_type}", "")
@@ -181,7 +174,7 @@ def create_zoom_axis(
    ymin, ymax = 0, 0
    for line in lines:
        xdata = line.get_xdata()
        xdata, ind, _ = units.sort_axis(xdata, (*xlim, units.s))
        xdata, ind, _ = sort_axis(xdata, (*xlim, units.s))
        ydata = line.get_ydata()[ind]
        inset.plot(
            xdata, ydata, c=line.get_color(), ls=line.get_linestyle(), lw=line.get_linewidth()
@@ -262,7 +255,7 @@ def corner_annotation(text, ax, position="tl", rel_x_offset=0.05, rel_y_offset=0

def propagation_plot(
    values: np.ndarray,
    plt_range: Union[units.PlotRange, RangeType],
    plt_range: Union[PlotRange, RangeType],
    params: Parameters,
    ax: plt.Axes,
    log: Union[int, float, bool, str] = "1D",
@@ -279,7 +272,7 @@ def propagation_plot(
    ----------
    values : np.ndarray
        raw values, either complex fields or complex spectra
    plt_range : Union[units.PlotRange, RangeType]
    plt_range : Union[PlotRange, RangeType]
        time, wavelength or frequency range
    params : Parameters
        parameters of the simulation
@@ -417,7 +410,7 @@ def plot_2D(

def transform_2D_propagation(
    values: np.ndarray,
    plt_range: Union[units.PlotRange, RangeType],
    plt_range: Union[PlotRange, RangeType],
    params: Parameters,
    log: Union[int, float, bool, str] = "1D",
    skip: int = 1,
@@ -428,7 +421,7 @@ def transform_2D_propagation(
    ----------
    values : np.ndarray, shape (n, nt)
        values to transform
    plt_range : Union[units.PlotRange, RangeType]
    plt_range : Union[PlotRange, RangeType]
        range
    params : Parameters
        parameters of the simulation
@@ -468,7 +461,7 @@ def transform_2D_propagation(

def mean_values_plot(
    values: np.ndarray,
    plt_range: Union[units.PlotRange, RangeType],
    plt_range: Union[PlotRange, RangeType],
    params: Parameters,
    ax: plt.Axes,
    log: Union[float, int, str, bool] = False,
@@ -478,7 +471,7 @@ def mean_values_plot(
    spacing: Union[float, int] = 1,
    renormalize: bool = True,
    y_label: str = None,
    line_labels: Tuple[str, str] = None,
    line_labels: tuple[str, str] = None,
    mean_style: dict[str, Any] = None,
    individual_style: dict[str, Any] = None,
) -> tuple[plt.Line2D, list[plt.Line2D]]:
@@ -510,7 +503,7 @@ def mean_values_plot(

def transform_mean_values(
    values: np.ndarray,
    plt_range: Union[units.PlotRange, RangeType],
    plt_range: Union[PlotRange, RangeType],
    params: Parameters,
    log: Union[bool, int, float] = False,
    spacing: Union[int, float] = 1,
@@ -521,7 +514,7 @@ def transform_mean_values(
    ----------
    values : np.ndarray, shape (m, n)
        values to transform
    plt_range : Union[units.PlotRange, RangeType]
    plt_range : Union[PlotRange, RangeType]
        x axis specifications
    params : Parameters
        parameters of the simulation
@@ -545,7 +538,7 @@ def transform_mean_values(
    is_complex, x_axis, plt_range = prep_plot_axis(values, plt_range, params)
    if is_complex:
        values = abs2(values)
    new_axis, ind, ext = units.sort_axis(x_axis, plt_range)
    new_axis, ind, ext = sort_axis(x_axis, plt_range)
    values = values[:, ind]
    if plt_range.unit.type == "WL":
        values = np.apply_along_axis(units.to_WL, -1, values, new_axis)
@@ -636,7 +629,7 @@ def plot_mean(

def single_position_plot(
    values: np.ndarray,
    plt_range: Union[units.PlotRange, RangeType],
    plt_range: Union[PlotRange, RangeType],
    params: Parameters,
    ax: plt.Axes,
    log: Union[str, int, float, bool] = False,
@@ -711,7 +704,7 @@ def plot_1D(

def transform_1D_values(
    values: np.ndarray,
    plt_range: Union[units.PlotRange, RangeType],
    plt_range: Union[PlotRange, RangeType],
    params: Parameters,
    log: Union[int, float, bool] = False,
    spacing: Union[int, float] = 1,
@@ -722,7 +715,7 @@ def transform_1D_values(
    ----------
    values : np.ndarray, shape (n,)
        values to plot, may be complex
    plt_range : Union[units.PlotRange, RangeType]
    plt_range : Union[PlotRange, RangeType]
        plot range specification, either (min, max, unit) or a PlotRange obj
    params : Parameters
        parameters of the simulations
@@ -744,7 +737,7 @@ def transform_1D_values(
    is_complex, x_axis, plt_range = prep_plot_axis(values, plt_range, params)
    if is_complex:
        values = abs2(values)
    new_axis, ind, ext = units.sort_axis(x_axis, plt_range)
    new_axis, ind, ext = sort_axis(x_axis, plt_range)
    values = values[ind]
    if plt_range.unit.type == "WL":
        values = units.to_WL(values, new_axis)
@@ -814,8 +807,8 @@ def plot_spectrogram(
    if values.ndim != 1:
        print("plot_spectrogram can only plot 1D arrays")
        return
    x_range: units.PlotRange
    y_range: units.PlotRange
    x_range: PlotRange
    y_range: PlotRange
    _, x_axis, x_range = prep_plot_axis(values, x_range, params)
    _, y_axis, y_range = prep_plot_axis(values, y_range, params)

@@ -862,7 +855,7 @@ plot_spectrogram(


def uniform_axis(
    axis: np.ndarray, values: np.ndarray, new_axis_spec: Union[units.PlotRange, RangeType, str]
    axis: np.ndarray, values: np.ndarray, new_axis_spec: Union[PlotRange, RangeType, str]
) -> tuple[np.ndarray, np.ndarray]:
    """given some values(axis), creates a new uniformly spaced axis and interpolates
    the values over it.
@@ -891,14 +884,14 @@ def uniform_axis(
        new_axis_spec = "unity"
    if isinstance(new_axis_spec, str) or callable(new_axis_spec):
        unit = units.get_unit(new_axis_spec)
        plt_range = units.PlotRange(unit.inv(axis.min()), unit.inv(axis.max()), new_axis_spec)
        plt_range = PlotRange(unit.inv(axis.min()), unit.inv(axis.max()), new_axis_spec)
    elif isinstance(new_axis_spec, tuple):
        plt_range = units.PlotRange(*new_axis_spec)
    elif isinstance(new_axis_spec, units.PlotRange):
        plt_range = PlotRange(*new_axis_spec)
    elif isinstance(new_axis_spec, PlotRange):
        plt_range = new_axis_spec
    else:
        raise TypeError(f"Don't know how to interpret {new_axis_spec}")
    tmp_axis, ind, ext = units.sort_axis(axis, plt_range)
    tmp_axis, ind, ext = sort_axis(axis, plt_range)
    values = np.atleast_2d(values)
    if np.allclose((diff := np.diff(tmp_axis))[0], diff):
        new_axis = tmp_axis
@@ -954,11 +947,11 @@ def apply_log(values: np.ndarray, log: Union[str, bool, float, int]) -> np.ndarr


def prep_plot_axis(
    values: np.ndarray, plt_range: Union[units.PlotRange, RangeType], params: Parameters
) -> tuple[bool, np.ndarray, units.PlotRange]:
    values: np.ndarray, plt_range: Union[PlotRange, RangeType], params: Parameters
) -> tuple[bool, np.ndarray, PlotRange]:
    is_spectrum = values.dtype == "complex"
    if not isinstance(plt_range, units.PlotRange):
        plt_range = units.PlotRange(*plt_range)
    if not isinstance(plt_range, PlotRange):
        plt_range = PlotRange(*plt_range)
    if plt_range.unit.type in ["WL", "FREQ", "AFREQ"]:
        x_axis = params.w.copy()
    else:

@@ -1,22 +1,21 @@
from itertools import cycle
import itertools
from itertools import cycle
from pathlib import Path
from typing import Any, Iterable, Optional
from cycler import cycler

import matplotlib.pyplot as plt
import numpy as np
from cycler import cycler
from tqdm import tqdm

from ..utils.parameter import Parameters
from ..const import PARAM_SEPARATOR

from ..initialize import ParamSequence
from ..physics import units, fiber
from ..spectra import Pulse
from ..utils import pretty_format_value, pretty_format_from_sim_name, auto_crop
from ..plotting import plot_setup
from .. import env, math
from ..const import PARAM_SEPARATOR
from ..initialize import ParamSequence
from ..physics import fiber, units
from ..plotting import plot_setup
from ..spectra import Pulse
from ..utils import auto_crop
from ..utils.parameter import Parameters, pretty_format_from_sim_name, pretty_format_value


def fingerprint(params: Parameters):
@@ -91,7 +90,7 @@ def plot_dispersion(config_path: Path, lim: tuple[float, float] = None):
    loss_ax = None
    plt.sca(left)
    for style, lbl, params in plot_helper(config_path):
        if params.alpha is not None and loss_ax is None:
        if params.alpha_arr is not None and loss_ax is None:
            loss_ax = right.twinx()
        if (bbb := tuple(params.beta2_coefficients)) not in already_plotted:
            already_plotted.add(bbb)
@@ -116,7 +115,7 @@ def plot_init(
    all_labels = []
    already_plotted = set()
    for style, lbl, params in plot_helper(config_path):
        if params.alpha is not None and loss_ax is None:
        if params.alpha_arr is not None and loss_ax is None:
            loss_ax = tr.twinx()
        if (fp := fingerprint(params)) not in already_plotted:
            already_plotted.add(fp)
@@ -214,8 +213,8 @@ def plot_1_dispersion(
    left.set_xlabel(units.nm.label)
    right.set_xlabel("wavelength (nm)")

    if params.alpha is not None and loss is not None:
        loss.plot(1e9 * wl[m], params.alpha[m], c="r", ls="--")
    if params.alpha_arr is not None and loss is not None:
        loss.plot(1e9 * wl[m], params.alpha_arr[m], c="r", ls="--")
        loss.set_ylabel("loss (1/m)", color="r")
        loss.set_yscale("log")
        loss.tick_params(axis="y", labelcolor="r")

@@ -10,7 +10,7 @@ from typing import Tuple
import numpy as np

from ..initialize import validate_config_sequence
from ..io import Paths
from ..utils import Paths
from ..utils.parameter import BareConfig


@@ -6,12 +6,12 @@ from typing import Callable, Dict, Iterable, Union
import matplotlib.pyplot as plt
import numpy as np

from . import initialize, io, math
from . import initialize, math
from .const import SPECN_FN
from .logger import get_logger
from .physics import pulse, units
from .plotting import mean_values_plot, propagation_plot, single_position_plot
from .utils.parameter import Parameters
from .utils.parameter import Parameters, PlotRange


class Spectrum(np.ndarray):
@@ -347,7 +347,7 @@ class Pulse(Sequence):
        return mean_values_plot(vals, plt_range, self.params, ax, **kwargs)

    def retrieve_plot_values(self, left, right, unit, z_pos, sim_ind):
        plt_range = units.PlotRange(left, right, unit)
        plt_range = PlotRange(left, right, unit)
        if plt_range.unit.type == "TIME":
            vals = self.all_fields(ind=z_pos)
        else:

@@ -4,32 +4,428 @@ scgenerator module but some function may be used in any python program

"""

from __future__ import annotations

import itertools
import multiprocessing
import os
import random
import re
import shutil
import threading
from collections import abc
from copy import deepcopy
from dataclasses import asdict, replace
from io import StringIO
from pathlib import Path
from typing import Any, Iterable, Iterator, TypeVar, Union
from ..errors import IncompleteDataFolderError
from typing import Any, Callable, Generator, Iterable, Sequence, TypeVar, Union

import numpy as np
from numpy.lib.arraysetops import isin
import pkg_resources as pkg
import toml
from tqdm import tqdm

from .. import env
from ..const import PARAM_SEPARATOR, SPEC1_FN
from ..math import *
from .. import io
from .parameter import BareConfig, Parameters
from ..const import PARAM_FN, PARAM_SEPARATOR, SPEC1_FN, SPECN_FN, Z_FN, __version__
from ..env import TMP_FOLDER_KEY_BASE, data_folder, pbar_policy
from ..errors import IncompleteDataFolderError
from ..logger import get_logger


T_ = TypeVar("T_")

PathTree = list[tuple[Path, ...]]


class Paths:
    _data_files = [
        "silica.toml",
        "gas.toml",
        "hr_t.npz",
        "submit_job_template.txt",
        "start_worker.sh",
        "start_head.sh",
    ]

    paths = {
        f.split(".")[0]: os.path.abspath(
            pkg.resource_filename("scgenerator", os.path.join("data", f))
        )
        for f in _data_files
    }

    @classmethod
    def get(cls, key):
        if key not in cls.paths:
            if os.path.exists("paths.toml"):
                with open("paths.toml") as file:
                    paths_dico = toml.load(file)
                for k, v in paths_dico.items():
                    cls.paths[k] = v
        if key not in cls.paths:
            get_logger(__name__).info(
                f"{key} was not found in path index, returning current working directory."
            )
            cls.paths[key] = os.getcwd()

        return cls.paths[key]

    @classmethod
    def gets(cls, key):
        """returns the specified file as a string"""
        with open(cls.get(key)) as file:
            return file.read()

    @classmethod
    def plot(cls, name):
        """returns the path to the specified plot. Used to save new plots
        Example
        -------
        fig.savefig(Paths.plot("figure5.pdf"))
        """
        return os.path.join(cls.get("plots"), name)


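Note: Paths.get resolves keys against the bundled package data first, then merges an optional paths.toml from the working directory, and finally falls back to the current working directory. A hypothetical paths.toml entry and usage (key names invented for the example):

# paths.toml
# plots = "/home/user/figures"

silica_file = Paths.get("silica")     # bundled data file shipped with scgenerator
fig_path = Paths.plot("figure5.pdf")  # "<plots dir>/figure5.pdf" once "plots" is defined
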
def load_previous_spectrum(prev_data_dir: str) -> np.ndarray:
    num = find_last_spectrum_num(prev_data_dir)
    return np.load(prev_data_dir / SPEC1_FN.format(num))


def conform_toml_path(path: os.PathLike) -> Path:
    path = Path(path)
    if not path.name.lower().endswith(".toml"):
        path = path.parent / (path.name + ".toml")
    return path


def load_toml(path: os.PathLike):
    """returns a dictionary parsed from the specified toml file"""
    path = conform_toml_path(path)
    with open(path, mode="r") as file:
        dico = toml.load(file)
    dico.setdefault("variable", {})
    for key in {"simulation", "fiber", "gas", "pulse"} & dico.keys():
        section = dico.pop(key, {})
        dico["variable"].update(section.pop("variable", {}))
        dico.update(section)
    if len(dico["variable"]) == 0:
        dico.pop("variable")
    return dico


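Note: load_toml flattens the "simulation", "fiber", "gas" and "pulse" tables into one flat dictionary and collects their "variable" sub-tables into a single "variable" entry. A hypothetical input and result (parameter values invented for the illustration):

# [fiber]                          # load_toml(...) returns:
# length = 0.055                   # {"length": 0.055,
# [pulse]                          #  "width": 50e-15,
# width = 50e-15                   #  "variable": {"wavelength": [8e-07, 1.03e-06]}}
# [pulse.variable]
# wavelength = [800e-9, 1030e-9]
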
def save_toml(path: os.PathLike, dico):
    """saves a dictionary into a toml file"""
    path = conform_toml_path(path)
    with open(path, mode="w") as file:
        toml.dump(dico, file)
    return dico


def save_parameters(
    params: dict[str, Any], destination_dir: Path, file_name: str = "params.toml"
) -> Path:
    """saves a parameter dictionary. Note that it does remove some entries, particularly
    those that take a lot of space ("t", "w", ...)

    Parameters
    ----------
    params : dict[str, Any]
        dictionary to save
    destination_dir : Path
        destination directory

    Returns
    -------
    Path
        path to the newly created parameter file
    """
    file_path = destination_dir / file_name

    file_path.parent.mkdir(exist_ok=True)

    # save toml of the simulation
    with open(file_path, "w") as file:
        toml.dump(params, file, encoder=toml.TomlNumpyEncoder())

    return file_path


def load_material_dico(name: str) -> dict[str, Any]:
    """loads a material dictionary
    Parameters
    ----------
    name : str
        name of the material
    Returns
    -------
    material_dico : dict
    """
    if name == "silica":
        return toml.loads(Paths.gets("silica"))
    else:
        return toml.loads(Paths.gets("gas"))[name]


def get_data_dirs(sim_dir: Path) -> list[Path]:
    """returns a list of absolute paths corresponding to a particular run

    Parameters
    ----------
    sim_dir : Path
        path to directory containing the initial config file and the spectra sub folders

    Returns
    -------
    list[Path]
        paths to sub folders
    """

    return [p.resolve() for p in sim_dir.glob("*") if p.is_dir()]


def update_appended_params(source: Path, destination: Path, z: Sequence):
    z_num = len(z)
    params = load_toml(source)
    if "simulation" in params:
        params["simulation"]["z_num"] = z_num
        params["fiber"]["length"] = float(z[-1] - z[0])
    else:
        params["z_num"] = z_num
        params["length"] = float(z[-1] - z[0])
    save_toml(destination, params)


def build_path_trees(sim_dir: Path) -> list[PathTree]:
    sim_dir = sim_dir.resolve()
    path_branches: list[tuple[Path, ...]] = []
    to_check = list(sim_dir.glob("id*num*"))
    with PBars(len(to_check), desc="Building path trees") as pbar:
        for branch in map(build_path_branch, to_check):
            if branch is not None:
                path_branches.append(branch)
            pbar.update()
    path_trees = group_path_branches(path_branches)
    return path_trees


def build_path_branch(data_dir: Path) -> tuple[Path, ...]:
    if not data_dir.is_dir():
        return None
    path_branch = [data_dir]
    while (prev_sim_path := load_toml(path_branch[-1] / PARAM_FN).get("prev_data_dir")) is not None:
        p = Path(prev_sim_path).resolve()
        if not p.exists():
            p = Path(*p.parts[-2:]).resolve()
        path_branch.append(p)
    return tuple(reversed(path_branch))


def group_path_branches(path_branches: list[tuple[Path, ...]]) -> list[PathTree]:
    """groups path lists

    [
        ("a/id 0 wavelength 100 num 0"," b/id 0 wavelength 100 num 0"),
        ("a/id 2 wavelength 100 num 1"," b/id 2 wavelength 100 num 1"),
        ("a/id 1 wavelength 200 num 0"," b/id 1 wavelength 200 num 0"),
        ("a/id 3 wavelength 200 num 1"," b/id 3 wavelength 200 num 1")
    ]
    ->
    [
        (
            ("a/id 0 wavelength 100 num 0", "a/id 2 wavelength 100 num 1"),
            ("b/id 0 wavelength 100 num 0", "b/id 2 wavelength 100 num 1"),
        )
        (
            ("a/id 1 wavelength 200 num 0", "a/id 3 wavelength 200 num 1"),
            ("b/id 1 wavelength 200 num 0", "b/id 3 wavelength 200 num 1"),
        )
    ]


    Parameters
    ----------
    path_branches : list[tuple[Path, ...]]
        each element of the list is a path to a folder containing data of one simulation

    Returns
    -------
    list[PathTree]
        list of PathTrees to be used in merge
    """
    sort_key = lambda el: el[0]

    size = len(path_branches[0])
    out_trees_map: dict[str, dict[int, dict[int, Path]]] = {}
    for branch in path_branches:
        b_id = branch_id(branch)
        out_trees_map.setdefault(b_id, {i: {} for i in range(size)})
        for sim_part, data_dir in enumerate(branch):
            *_, num = data_dir.name.split()
            out_trees_map[b_id][sim_part][int(num)] = data_dir

    return [
        tuple(
            tuple(w for _, w in sorted(v.items(), key=sort_key))
            for __, v in sorted(d.items(), key=sort_key)
        )
        for d in out_trees_map.values()
    ]


def merge_path_tree(
    path_tree: PathTree, destination: Path, z_callback: Callable[[int], None] = None
):
    """given a path tree, copies the files into the right location

    Parameters
    ----------
    path_tree : PathTree
        elements of the list returned by group_path_branches
    destination : Path
        dir where to save the data
    """
    z_arr: list[float] = []

    destination.mkdir(exist_ok=True)

    for i, (z, merged_spectra) in enumerate(merge_spectra(path_tree)):
        z_arr.append(z)
        spec_out_name = SPECN_FN.format(i)
        np.save(destination / spec_out_name, merged_spectra)
        if z_callback is not None:
            z_callback(i)
    d = np.diff(z_arr)
    d[d < 0] = 0
    z_arr = np.concatenate(([z_arr[0]], np.cumsum(d)))
    np.save(destination / Z_FN, z_arr)
    update_appended_params(path_tree[-1][0] / PARAM_FN, destination / PARAM_FN, z_arr)


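Note: a small worked illustration of the z-axis stitching above (numbers invented). Consecutive fibers each store z starting from 0, so the raw concatenated array is non-monotonic; clamping negative jumps to zero and cumulatively summing produces one continuous axis:

# raw z:    [0.0, 0.25, 0.5, 0.0, 0.25, 0.5]   (two fibers of length 0.5)
# np.diff:  [0.25, 0.25, -0.5, 0.25, 0.25]  -> negative jump clamped to 0
# stitched: [0.0, 0.25, 0.5, 0.5, 0.75, 1.0]
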
def merge_spectra(
    path_tree: PathTree,
) -> Generator[tuple[float, np.ndarray], None, None]:
    for same_sim_paths in path_tree:
        z_arr = np.load(same_sim_paths[0] / Z_FN)
        for i, z in enumerate(z_arr):
            spectra: list[np.ndarray] = []
            for data_dir in same_sim_paths:
                spec = np.load(data_dir / SPEC1_FN.format(i))
                spectra.append(spec)
            yield z, np.atleast_2d(spectra)


def merge(destination: os.PathLike, path_trees: list[PathTree] = None):

    destination = ensure_folder(Path(destination))

    z_num = 0
    prev_z_num = 0

    for i, sim_dir in enumerate(sim_dirs(path_trees)):
        conf = sim_dir / "initial_config.toml"
        shutil.copy(
            conf,
            destination / f"initial_config_{i}.toml",
        )
        prev_z_num = load_toml(conf).get("z_num", prev_z_num)
        z_num += prev_z_num

    pbars = PBars(
        len(path_trees) * z_num, "Merging", 1, worker_kwargs=dict(total=z_num, desc="current pos")
    )
    for path_tree in path_trees:
        pbars.reset(1)
        iden = PARAM_SEPARATOR.join(path_tree[-1][0].name.split()[2:-2])
        merge_path_tree(path_tree, destination / iden, z_callback=lambda i: pbars.update(1))


def sim_dirs(path_trees: list[PathTree]) -> Generator[Path, None, None]:
    for p in path_trees[0]:
        yield p[0].parent


def get_sim_dir(task_id: int, path_if_new: Path = None) -> Path:
    if path_if_new is None:
        path_if_new = Path("scgenerator data")
    tmp = data_folder(task_id)
    if tmp is None:
        tmp = ensure_folder(path_if_new)
        os.environ[TMP_FOLDER_KEY_BASE + str(task_id)] = str(tmp)
    tmp = Path(tmp).resolve()
    if not tmp.exists():
        tmp.mkdir()
    return tmp


def set_data_folder(task_id: int, path: os.PathLike):
    """stores the path to an existing data folder in the environment

    Parameters
    ----------
    task_id : int
        id uniquely identifying the session
    path : str
        path to the root of the data folder
    """
    idstr = str(int(task_id))
    os.environ[TMP_FOLDER_KEY_BASE + idstr] = str(path)


def save_data(data: np.ndarray, data_dir: Path, file_name: str):
    """saves numpy array to disk

    Parameters
    ----------
    data : np.ndarray
        data to save
    data_dir : Path
        directory in which to save the file
    file_name : str
        file name
    """
    path = data_dir / file_name
    np.save(path, data)
    get_logger(__name__).debug(f"saved data in {path}")
    return


def ensure_folder(path: Path, prevent_overwrite: bool = True) -> Path:
    """ensures a folder exists and doesn't overwrite anything if required

    Parameters
    ----------
    path : Path
        desired path
    prevent_overwrite : bool, optional
        whether to create a new directory when one already exists, by default True

    Returns
    -------
    Path
        final path
    """

    path = path.resolve()

    # is path root ?
    if len(path.parts) < 2:
        return path

    # is a part of path an existing *file* ?
    parts = path.parts
    path = Path(path.root)
    for part in parts:
        if path.is_file():
            path = ensure_folder(path, prevent_overwrite=False)
        path /= part

    folder_name = path.name

    for i in itertools.count():
        if not path.is_file() and (not prevent_overwrite or not path.is_dir()):
            path.mkdir(exist_ok=True)
            return path
        path = path.parent / (folder_name + f"_{i}")


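Note: a hypothetical illustration of the collision-avoidance loop above (directory names invented):

# with a directory "results" already present:
# ensure_folder(Path("results"))                           -> creates and returns ".../results_0"
# ensure_folder(Path("results"), prevent_overwrite=False)  -> returns the existing ".../results"
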
class PBars:
    def __init__(
@@ -53,7 +449,7 @@ class PBars:
        self.num_tot: int = task
        self.iterator = None

        self.policy = env.pbar_policy()
        self.policy = pbar_policy()
        if head_kwargs is None:
            head_kwargs = dict()
        if worker_kwargs is None:
@@ -62,7 +458,7 @@ class PBars:
            desc="Worker {worker_id}",
            bar_format="{l_bar}{bar}" "|[{elapsed}<{remaining}, " "{rate_fmt}{postfix}]",
        )
        if "print" not in env.pbar_policy():
        if "print" not in pbar_policy():
            head_kwargs["file"] = worker_kwargs["file"] = StringIO()
            self.width = 80
        head_kwargs["desc"] = desc
@@ -190,72 +586,10 @@ def progress_worker(
            pbars[0].update()


def format_variable_list(l: list[tuple[str, Any]]):
    joints = 2 * PARAM_SEPARATOR
    str_list = []
    for p_name, p_value in l:
        ps = p_name.replace("/", "").replace(joints[0], "").replace(joints[1], "")
        vs = (
            format_value(p_name, p_value)
            .replace("/", "")
            .replace(joints[0], "")
            .replace(joints[1], "")
        )
        str_list.append(ps + joints[1] + vs)
    return joints[0].join(str_list)


def branch_id(branch: tuple[Path, ...]) -> str:
    return "".join("".join(re.sub(r"id\d+\S*num\d+", "", b.name).split()[2:-2]) for b in branch)


def format_value(name: str, value) -> str:
    if value is True or value is False:
        return str(value)
    elif isinstance(value, (float, int)):
        try:
            return getattr(Parameters, name).display(value)
        except AttributeError:
            return format(value, ".9g")
    elif isinstance(value, (list, tuple, np.ndarray)):
        return "-".join([str(v) for v in value])
    elif isinstance(value, str):
        p = Path(value)
        if p.exists():
            return p.stem
    return str(value)


def pretty_format_value(name: str, value) -> str:
    try:
        return getattr(Parameters, name).display(value)
    except AttributeError:
        return name + PARAM_SEPARATOR + str(value)


def pretty_format_from_sim_name(name: str) -> str:
    """formats a pretty version of a simulation directory

    Parameters
    ----------
    name : str
        name of the simulation (directory name)

    Returns
    -------
    str
        prettier name
    """
    s = name.split(PARAM_SEPARATOR)
    out = []
    for key, value in zip(s[::2], s[1::2]):
        try:
            out += [key.replace("_", " "), getattr(Parameters, key).display(float(value))]
        except (AttributeError, ValueError):
            out.append(key + PARAM_SEPARATOR + value)
    return PARAM_SEPARATOR.join(out)


def check_data_integrity(sub_folders: list[Path], init_z_num: int):
    """checks the integrity and completeness of a simulation data folder

@@ -299,7 +633,7 @@ def num_left_to_propagate(sub_folder: Path, init_z_num: int) -> int:
    IncompleteDataFolderError
        raised if init_z_num doesn't match that specified in the individual parameter file
    """
    z_num = io.load_toml(sub_folder / "params.toml")["z_num"]
    z_num = load_toml(sub_folder / "params.toml")["z_num"]
    num_spectra = find_last_spectrum_num(sub_folder) + 1  # because of zero-indexing

    if z_num != init_z_num:
@@ -318,105 +652,6 @@ def find_last_spectrum_num(data_dir: Path):
    return num - 1


def variable_iterator(config: BareConfig) -> Iterator[tuple[list[tuple[str, Any]], dict[str, Any]]]:
    """given a config with "variable" parameters, iterates through every possible combination,
    yielding a list of (parameter_name, value) tuples and a full config dictionary.

    Parameters
    ----------
    config : BareConfig
        initial config obj

    Yields
    -------
    Iterator[tuple[list[tuple[str, Any]], dict[str, Any]]]
        variable_list : a list of (name, value) tuple of parameter name and value that are variable.

        params : a dict[str, Any] to be fed to Parameters
    """
    possible_keys = []
    possible_ranges = []

    for key, values in config.variable.items():
        possible_keys.append(key)
        possible_ranges.append(range(len(values)))

    combinations = itertools.product(*possible_ranges)

    for combination in combinations:
        indiv_config = {}
        variable_list = []
        for i, key in enumerate(possible_keys):
            parameter_value = config.variable[key][combination[i]]
            indiv_config[key] = parameter_value
            variable_list.append((key, parameter_value))
        param_dict = asdict(config)
        param_dict.pop("variable")
        param_dict.update(indiv_config)
        yield variable_list, param_dict


def required_simulations(
    *configs: BareConfig,
) -> Iterator[tuple[list[tuple[str, Any]], Parameters]]:
    """takes the output of `scgenerator.utils.variable_iterator`, which is a new dict per different
    parameter set, and iterates through every single necessary simulation

    Yields
    -------
    Iterator[tuple[list[tuple[str, Any]], dict]]
        variable_ind : a list of (name, value) tuple of parameter name and value that are variable. The parameter
        "num" (how many times this specific parameter set has been yielded already) and "id" (how many parameter sets
        have been exhausted already) are added to the list to make sure every yielded list is unique.

        dict : a config dictionary for one simulation
    """
    i = 0  # unique sim id
    for data in itertools.product(*[variable_iterator(config) for config in configs]):
        all_variable_only, all_params_dict = list(zip(*data))
        params_dict = all_params_dict[0]
        for p in all_params_dict[1:]:
            params_dict.update({k: v for k, v in p.items() if v is not None})
        variable_only = reduce_all_variable(all_variable_only)
        for j in range(configs[0].repeat or 1):
            variable_ind = [("id", i)] + variable_only + [("num", j)]
            i += 1
            yield variable_ind, Parameters(**params_dict)


def reduce_all_variable(all_variable: list[list[tuple[str, Any]]]) -> list[tuple[str, Any]]:
    out = []
    for n, variable_list in enumerate(all_variable):
        out += [("fiber", "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[n % 26] * (n // 26 + 1)), *variable_list]
    return out


def override_config(new: BareConfig, old: BareConfig = None) -> BareConfig:
    """makes sure all the parameters set in new are there, leaves untouched parameters in old"""
    new_dict = asdict(new)
    if old is None:
        return BareConfig(**new_dict)
    variable = deepcopy(old.variable)
    new_dict = {k: v for k, v in new_dict.items() if v is not None}

    for k, v in new_dict.pop("variable", {}).items():
        variable[k] = v
    for k in variable:
        new_dict[k] = None
    return replace(old, variable=variable, **new_dict)


def final_config_from_sequence(*configs: BareConfig) -> BareConfig:
    if len(configs) == 0:
        raise ValueError("Must provide at least one config")
    if len(configs) == 1:
        return configs[0]
    elif len(configs) == 2:
        return override_config(*configs[::-1])
    else:
        return override_config(configs[-1], final_config_from_sequence(*configs[:-1]))


def auto_crop(x: np.ndarray, y: np.ndarray, rel_thr: float = 0.01) -> np.ndarray:
    threshold = y.min() + rel_thr * (y.max() - y.min())
    above_threshold = y > threshold

@@ -1,21 +1,23 @@
import datetime as datetime_module
import inspect
import itertools
import os
import re
from collections import defaultdict
from copy import copy
from dataclasses import asdict, dataclass
from dataclasses import asdict, dataclass, fields, replace
from functools import lru_cache
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, TypeVar, Union
import os
import numpy as np
from tqdm.std import Bar
from pathlib import Path
from typing import Any, Callable, Iterable, Optional, TypeVar, Union, Iterator
from copy import deepcopy

from .. import math
from ..const import __version__
import numpy as np

from .. import math, utils
from ..const import PARAM_SEPARATOR, __version__
from ..logger import get_logger
from .. import io
from ..physics import fiber, materials, pulse, units
from ..errors import EvaluatorError, NoDefaultError

T = TypeVar("T")

@@ -99,7 +101,7 @@ def int_pair(name, t):
    invalid = len(t) != 2
    for m in t:
        if invalid or not isinstance(m, int):
            raise ValueError(f"{name!r} must be a list or a tuple of 2 int")
            raise ValueError(f"{name!r} must be a list or a tuple of 2 int. got {t!r} instead")


@type_checker(tuple, list)
@@ -107,7 +109,7 @@ def float_pair(name, t):
    invalid = len(t) != 2
    for m in t:
        if invalid or not isinstance(m, (int, float)):
            raise ValueError(f"{name!r} must be a list or a tuple of 2 numbers")
            raise ValueError(f"{name!r} must be a list or a tuple of 2 numbers. got {t!r} instead")


def literal(*l):
@@ -235,8 +237,9 @@ class Parameter:

    def __set__(self, instance, value):
        if isinstance(value, Parameter):
            defaut = None if self.default is None else copy(self.default)
            instance.__dict__[self.name] = defaut
            # defaut = None if self.default is None else copy(self.default)
            # instance.__dict__[self.name] = defaut
            instance.__dict__[self.name] = None
        else:
            if value is not None:
                self.validator(self.name, value)
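Note: for context, a minimal sketch of the descriptor protocol that Parameter relies on (a simplified stand-in, not the full class):

class ValidatedAttr:
    """data descriptor that validates on assignment, in the spirit of Parameter above"""

    def __set_name__(self, owner, name):
        self.name = name

    def __get__(self, instance, owner):
        if instance is None:
            return self  # class-level access returns the descriptor itself, as getattr(Parameters, name) expects
        return instance.__dict__.get(self.name)

    def __set__(self, instance, value):
        if value is not None and not isinstance(value, (int, float)):
            raise TypeError(f"{self.name} must be a number")
        instance.__dict__[self.name] = value
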
@@ -356,6 +359,7 @@ mandatory_parameters = [
    "w_power_fact",
    "alpha",
    "spec_0",
    "field_0",
    "z_targets",
    "length",
    "beta2_coefficients",
@@ -364,7 +368,7 @@ mandatory_parameters = [
    "raman_type",
    "hr_w",
    "adapt_step_size",
    "tollerated_error",
    "tolerated_error",
    "dynamic_dispersion",
    "recovery_last_stored",
]
@@ -394,8 +398,8 @@ class Parameters:
    pitch: float = Parameter(in_range_excl(0, 1e-3))
    pitch_ratio: float = Parameter(in_range_excl(0, 1))
    core_radius: float = Parameter(in_range_excl(0, 1e-3))
    he_mode: Tuple[int, int] = Parameter(int_pair, default=(1, 1))
    fit_parameters: Tuple[int, int] = Parameter(int_pair, default=(0.08, 200e-9))
    he_mode: tuple[int, int] = Parameter(int_pair, default=(1, 1))
    fit_parameters: tuple[int, int] = Parameter(int_pair, default=(0.08, 200e-9))
    beta2_coefficients: Iterable[float] = Parameter(num_list)
    dispersion_file: str = Parameter(string)
    model: str = Parameter(
@@ -430,6 +434,7 @@ class Parameters:
    shape: str = Parameter(literal("gaussian", "sech"), default="gaussian")
    wavelength: float = Parameter(in_range_incl(100e-9, 3000e-9), display_info=(1e9, "nm"))
    intensity_noise: float = Parameter(in_range_incl(0, 1), display_info=(1e2, "%"), default=0)
    noise_correlation: float = Parameter(in_range_incl(-10, 10), default=0)
    width: float = Parameter(in_range_excl(0, 1e-9), display_info=(1e15, "fs"))
    t0: float = Parameter(in_range_excl(0, 1e-9), display_info=(1e15, "fs"))

@@ -446,8 +451,8 @@ class Parameters:
    time_window: float = Parameter(positive(float, int))
    dt: float = Parameter(in_range_excl(0, 5e-15))
    tolerated_error: float = Parameter(in_range_excl(1e-15, 1e-3), default=1e-11)
    step_size: float = Parameter(positive(float, int))
    interpolation_range: Tuple[float, float] = Parameter(float_pair)
    step_size: float = Parameter(positive(float, int), default=0)
    interpolation_range: tuple[float, float] = Parameter(float_pair)
    interpolation_degree: int = Parameter(positive(int), default=8)
    prev_sim_dir: str = Parameter(string)
    recovery_last_stored: int = Parameter(non_negative(int), default=0)
@@ -457,7 +462,8 @@ class Parameters:
    field_0: np.ndarray = Parameter(type_checker(np.ndarray))
    spec_0: np.ndarray = Parameter(type_checker(np.ndarray))
    beta2: float = Parameter(type_checker(int, float))
    alpha: np.ndarray = Parameter(type_checker(np.ndarray))
    alpha_arr: np.ndarray = Parameter(type_checker(np.ndarray))
    alpha: float = Parameter(positive(float, int), default=0)
    gamma_arr: np.ndarray = Parameter(type_checker(np.ndarray))
    A_eff_arr: np.ndarray = Parameter(type_checker(np.ndarray))
    w: np.ndarray = Parameter(type_checker(np.ndarray))
@@ -475,12 +481,12 @@ class Parameters:
    hr_w: np.ndarray = Parameter(type_checker(np.ndarray))
    z_targets: np.ndarray = Parameter(type_checker(np.ndarray))
    const_qty: np.ndarray = Parameter(type_checker(np.ndarray))
    beta_func: Callable[[float], List[float]] = Parameter(func_validator)
    beta_func: Callable[[float], list[float]] = Parameter(func_validator)
    gamma_func: Callable[[float], float] = Parameter(func_validator)
    datetime: datetime_module.datetime = Parameter(type_checker(datetime_module.datetime))
    version: str = Parameter(string)

    def prepare_for_dump(self) -> Dict[str, Any]:
    def prepare_for_dump(self) -> dict[str, Any]:
        param = asdict(self)
        param = Parameters.strip_params_dict(param)
        param["datetime"] = datetime_module.datetime.now()
@@ -493,16 +499,21 @@ class Parameters:
        evaluator.set(**param_dict)
        for p_name in mandatory_parameters:
            evaluator.compute(p_name)
        valid_fields = self.all_parameters()
        for k, v in evaluator.params.items():
            if k in param_dict:
            if k in valid_fields:
                setattr(self, k, v)

    @classmethod
    def all_parameters(cls) -> list[str]:
        return [f.name for f in fields(cls)]

    @classmethod
    def load(cls, path: os.PathLike) -> "Parameters":
        return cls(**io.load_toml(path))
        return cls(**utils.load_toml(path))

    @staticmethod
    def strip_params_dict(dico: Dict[str, Any]) -> Dict[str, Any]:
    def strip_params_dict(dico: dict[str, Any]) -> dict[str, Any]:
        """prepares a dictionary for serialization. Some keys may not be preserved
        (dropped because they take a lot of space and can be exactly reconstructed)

@@ -545,10 +556,6 @@ class Parameters:
        return out


class EvaluatorError(Exception):
    pass


class Rule:
    def __init__(
        self,
@@ -657,6 +664,12 @@ class Evaluator:
        self.params = {}
        self.eval_stats = defaultdict(EvalStat)

    def get_default(self, key: str) -> Any:
        try:
            return getattr(Parameters, key).default
        except AttributeError:
            return None

    def compute(self, target: str) -> Any:
        """computes a target

@@ -679,6 +692,8 @@ class Evaluator:
        """
        value = self.params.get(target)
        if value is None:
            prefix = "\t" * len(self.__curent_lookup)
            # Avoid cycles
            if target in self.__curent_lookup:
                raise EvaluatorError(
                    "cyclic dependency detected : "
@@ -689,13 +704,17 @@ class Evaluator:
            self.__curent_lookup.add(target)

            if len(self.rules[target]) == 0:
                raise EvaluatorError(f"no rule for {target}")
                error = EvaluatorError(f"no rule for {target}")
            else:
                error = None

            error = None
            # try every rule until one succeeds
            for ii, rule in enumerate(
                filter(lambda r: self.validate_condition(r), reversed(self.rules[target]))
                filter(lambda r: self.validate_condition(r), self.rules[target])
            ):
                self.logger.debug(f"attempt {ii+1} to compute {target}, this time using {rule!r}")
                self.logger.debug(
                    prefix + f"attempt {ii+1} to compute {target}, this time using {rule!r}"
                )
                try:
                    args = [self.compute(k) for k in rule.args]
                    returned_values = rule.func(*args)
@@ -710,16 +729,26 @@ class Evaluator:
                        or self.eval_stats[param_name].priority < param_priority
                    ):
                        self.logger.info(
                            f"computed {param_name}={returned_value} using {rule.func.__name__} from {rule.func.__module__}"
                            prefix
                            + f"computed {param_name}={returned_value} using {rule.func.__name__} from {rule.func.__module__}"
                        )
                        self.params[param_name] = returned_value
                        self.eval_stats[param_name] = param_priority
                        self.eval_stats[param_name].priority = param_priority
                        if param_name == target:
                            value = returned_value
                    break
                except (EvaluatorError, KeyError) as e:
                except (EvaluatorError, KeyError, NoDefaultError) as e:
                    error = e
                    self.logger.debug(
                        prefix + f"error using {rule.func.__name__} : {str(error).strip()}"
                    )
                    continue
            else:
                default = self.get_default(target)
                if default is None:
                    error = NoDefaultError(prefix + f"No default provided for {target}")
                else:
                    value = default

            if value is None and error is not None:
                raise error
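Note: a minimal sketch of the rule-resolution pattern implemented above (simplified, invented names; the real Evaluator additionally tracks priorities, defaults and logging). This recursion with an "in progress" set is what produces the "attempt N to compute ..." and "cyclic dependency detected" lines in the log at the top of this commit:

class MiniEvaluator:
    def __init__(self, rules):
        self.rules = rules    # dict[str, list[tuple[callable, list[str]]]]
        self.params = {}
        self._lookup = set()  # targets currently being resolved, used to detect cycles

    def compute(self, target):
        if target in self.params:
            return self.params[target]
        if target in self._lookup:
            raise RuntimeError(f"cyclic dependency detected : {target!r} seems to depend on itself")
        self._lookup.add(target)
        try:
            for func, args in self.rules.get(target, []):
                try:
                    self.params[target] = func(*[self.compute(a) for a in args])
                    return self.params[target]
                except (RuntimeError, KeyError):
                    continue  # this rule failed: try the next one, like attempts 1, 2, 3 in the log
            raise RuntimeError(f"no rule for {target}")
        finally:
            self._lookup.discard(target)
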
@@ -749,6 +778,107 @@ class Evaluator:
        return wrapper


@dataclass
class BareConfig(Parameters):
    variable: dict = VariableParameter(Parameters)

    def __post_init__(self):
        pass

    @classmethod
    def load(cls, path: os.PathLike) -> "BareConfig":
        return cls(**utils.load_toml(path))

    @classmethod
    def load_sequence(cls, *config_paths: os.PathLike) -> list["BareConfig"]:
        """Loads a sequence of configs

        Parameters
        ----------
        config_paths : os.PathLike
            either one path (the last config containing previous_config_file parameter)
            or a list of config paths in the order they have to be simulated

        Returns
        -------
        list[BareConfig]
            all loaded configs
        """
        if config_paths[0] is None:
            return []
        all_configs = [cls.load(config_paths[0])]
        if len(config_paths) == 1:
            while True:
                if all_configs[0].previous_config_file is not None:
                    all_configs.insert(0, cls.load(all_configs[0].previous_config_file))
                else:
                    break
        else:
            for i, path in enumerate(config_paths[1:]):
                all_configs.append(cls.load(path))
                all_configs[i + 1].previous_config_file = config_paths[i]
        return all_configs


@dataclass
class PlotRange:
    left: float = Parameter(type_checker(int, float))
    right: float = Parameter(type_checker(int, float))
    unit: Callable[[float], float] = Parameter(units.is_unit, converter=units.get_unit)
    conserved_quantity: bool = Parameter(boolean, default=True)

    def __str__(self):
        return f"{self.left:.1f}-{self.right:.1f} {self.unit.__name__}"


def sort_axis(axis, plt_range: PlotRange) -> tuple[np.ndarray, np.ndarray, tuple[float, float]]:
    """
    given an axis, returns this axis cropped according to the given range, converted and sorted

    Parameters
    ----------
    axis : 1D array containing the original axis (usually the w or t array)
    plt_range : tuple (min, max, conversion_function) used to crop the axis

    Returns
    -------
    cropped : the axis cropped, converted and sorted
    indices : indices to use to slice and sort other arrays in the same fashion
    extent : tuple with min and max of cropped

    Example
    -------
    w = np.append(np.linspace(0, -10, 20), np.linspace(0, 10, 20))
    t = np.linspace(-10, 10, 400)
    W, T = np.meshgrid(w, t)
    y = np.exp(-W**2 - T**2)

    # Define ranges
    rw = (-4, 4, s)
    rt = (-2, 6, s)

    w, cw, _ = sort_axis(w, rw)
    t, ct, _ = sort_axis(t, rt)

    # slice y according to the given ranges
    y = y[ct][:, cw]
    """
    if isinstance(plt_range, tuple):
        plt_range = PlotRange(*plt_range)
    r = np.array((plt_range.left, plt_range.right), dtype="float")

    indices = np.arange(len(axis))[
        (axis <= np.max(plt_range.unit(r))) & (axis >= np.min(plt_range.unit(r)))
    ]
    cropped = axis[indices]
    order = np.argsort(plt_range.unit.inv(cropped))
    indices = indices[order]
    cropped = cropped[order]
    out_ax = plt_range.unit.inv(cropped)

    return out_ax, indices, (out_ax[0], out_ax[-1])


def get_arg_names(func: Callable) -> list[str]:
    spec = inspect.getfullargspec(func)
    args = spec.args
@@ -780,6 +910,167 @@ def func_rewrite(func: Callable, kwarg_names: list[str], arg_names: list[str] =
    return out_func


def format_variable_list(l: list[tuple[str, Any]]):
    joints = 2 * PARAM_SEPARATOR
    str_list = []
    for p_name, p_value in l:
        ps = p_name.replace("/", "").replace(joints[0], "").replace(joints[1], "")
        vs = (
            format_value(p_name, p_value)
            .replace("/", "")
            .replace(joints[0], "")
            .replace(joints[1], "")
        )
        str_list.append(ps + joints[1] + vs)
    return joints[0].join(str_list)


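Note: a hypothetical illustration of the directory-name encoding above, assuming PARAM_SEPARATOR is a single space (consistent with folder names like "id 0 wavelength 100 num 0" elsewhere in this diff):

# format_variable_list([("id", 0), ("wavelength", 8e-07), ("num", 0)])
# -> "id 0 wavelength 800nm num 0"
# (the exact rendering of each value depends on Parameters.<name>.display; see format_value below)
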
def format_value(name: str, value) -> str:
    if value is True or value is False:
        return str(value)
    elif isinstance(value, (float, int)):
        try:
            return getattr(Parameters, name).display(value)
        except AttributeError:
            return format(value, ".9g")
    elif isinstance(value, (list, tuple, np.ndarray)):
        return "-".join([str(v) for v in value])
    elif isinstance(value, str):
        p = Path(value)
        if p.exists():
            return p.stem
    return str(value)


def pretty_format_value(name: str, value) -> str:
    try:
        return getattr(Parameters, name).display(value)
    except AttributeError:
        return name + PARAM_SEPARATOR + str(value)


def pretty_format_from_sim_name(name: str) -> str:
    """formats a pretty version of a simulation directory

    Parameters
    ----------
    name : str
        name of the simulation (directory name)

    Returns
    -------
    str
        prettier name
    """
    s = name.split(PARAM_SEPARATOR)
    out = []
    for key, value in zip(s[::2], s[1::2]):
        try:
            out += [key.replace("_", " "), getattr(Parameters, key).display(float(value))]
        except (AttributeError, ValueError):
            out.append(key + PARAM_SEPARATOR + value)
    return PARAM_SEPARATOR.join(out)


def variable_iterator(config: BareConfig) -> Iterator[tuple[list[tuple[str, Any]], dict[str, Any]]]:
    """given a config with "variable" parameters, iterates through every possible combination,
    yielding a list of (parameter_name, value) tuples and a full config dictionary.

    Parameters
    ----------
    config : BareConfig
        initial config obj

    Yields
    -------
    Iterator[tuple[list[tuple[str, Any]], dict[str, Any]]]
        variable_list : a list of (name, value) tuple of parameter name and value that are variable.

        params : a dict[str, Any] to be fed to Parameters
    """
    possible_keys = []
    possible_ranges = []

    for key, values in config.variable.items():
        possible_keys.append(key)
        possible_ranges.append(range(len(values)))

    combinations = itertools.product(*possible_ranges)

    for combination in combinations:
        indiv_config = {}
        variable_list = []
        for i, key in enumerate(possible_keys):
            parameter_value = config.variable[key][combination[i]]
            indiv_config[key] = parameter_value
            variable_list.append((key, parameter_value))
        param_dict = asdict(config)
        param_dict.pop("variable")
        param_dict.update(indiv_config)
        yield variable_list, param_dict


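Note: a small hypothetical illustration of the combination expansion above (config values invented):

# config.variable == {"wavelength": [800e-9, 1030e-9], "power": [1e3]}
# variable_iterator(config) yields, in order:
#   ([("wavelength", 8e-07), ("power", 1000.0)], {..., "wavelength": 8e-07, "power": 1000.0})
#   ([("wavelength", 1.03e-06), ("power", 1000.0)], {..., "wavelength": 1.03e-06, "power": 1000.0})
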
def required_simulations(
    *configs: BareConfig,
) -> Iterator[tuple[list[tuple[str, Any]], Parameters]]:
    """takes the output of `scgenerator.utils.variable_iterator` (one new dict per distinct
    parameter set) and iterates through every single necessary simulation

    Yields
    ------
    variable_ind : list[tuple[str, Any]]
        (name, value) tuples of the parameters that are variable. The entries
        "num" (how many times this specific parameter set has been yielded already) and
        "id" (how many parameter sets have been exhausted already) are added to the list
        to make sure every yielded list is unique.
    params : Parameters
        the full parameter set for one simulation
    """
    i = 0  # unique sim id
    for data in itertools.product(*[variable_iterator(config) for config in configs]):
        all_variable_only, all_params_dict = list(zip(*data))
        params_dict = all_params_dict[0]
        for p in all_params_dict[1:]:
            params_dict.update({k: v for k, v in p.items() if v is not None})
        variable_only = reduce_all_variable(all_variable_only)
        for j in range(configs[0].repeat or 1):
            variable_ind = [("id", i)] + variable_only + [("num", j)]
            i += 1
            yield variable_ind, Parameters(**params_dict)


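# Sketch: with two configs whose variable grids contain 2 and 3 combinations and
# repeat=2 on the first config, required_simulations yields 2 * 3 * 2 = 12 simulations;
# each gets a unique ("id", i) plus a ("num", j) repeat index, so every variable_ind
# list is distinct even for otherwise identical parameter sets.
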
def reduce_all_variable(all_variable: list[list[tuple[str, Any]]]) -> list[tuple[str, Any]]:
    """flattens per-config variable lists, labeling each config with a fiber letter
    (A, B, ..., Z, AA, BB, ...)"""
    out = []
    for n, variable_list in enumerate(all_variable):
        out += [("fiber", "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[n % 26] * (n // 26 + 1)), *variable_list]
    return out


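# Example (follows directly from the implementation):
#     >>> reduce_all_variable([[("length", 1.0)], [("length", 2.0)]])
#     [('fiber', 'A'), ('length', 1.0), ('fiber', 'B'), ('length', 2.0)]
# After 'Z' the labels double up: list number 27 is tagged ('fiber', 'AA').
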
def override_config(new: BareConfig, old: BareConfig = None) -> BareConfig:
    """returns a copy of old where every parameter explicitly set in new takes
    precedence; parameters not set in new are left untouched"""
    new_dict = asdict(new)
    if old is None:
        return BareConfig(**new_dict)
    variable = deepcopy(old.variable)
    new_dict = {k: v for k, v in new_dict.items() if v is not None}

    for k, v in new_dict.pop("variable", {}).items():
        variable[k] = v
    for k in variable:
        new_dict[k] = None
    return replace(old, variable=variable, **new_dict)


def final_config_from_sequence(*configs: BareConfig) -> BareConfig:
    if len(configs) == 0:
        raise ValueError("Must provide at least one config")
    if len(configs) == 1:
        return configs[0]
    elif len(configs) == 2:
        return override_config(*configs[::-1])
    else:
        return override_config(configs[-1], final_config_from_sequence(*configs[:-1]))


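# Sketch (hypothetical file names): final_config_from_sequence folds a chain of partial
# configs so that later files take precedence:
#     configs = BareConfig.load_sequence("base.toml", "stage2.toml")
#     final = final_config_from_sequence(*configs)  # == override_config(stage2, base)
# Every non-None value from the later config overrides the earlier one, and any key
# listed as variable is cleared from the scalar fields so it can be swept instead.
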
default_rules: list[Rule] = [
    # Grid
    *Rule.deduce(
@@ -788,26 +1079,16 @@ default_rules: list[Rule] = [
        ["time_window", "t_num", "dt"],
        2,
    ),
    Rule("adapt_step_size", lambda step_size: step_size == 0),
    Rule("dynamic_dispersion", lambda pressure: isinstance(pressure, (list, tuple, np.ndarray))),
    # Pulse
    Rule("spec_0", np.fft.fft, ["field_0"]),
    Rule("field_0", np.fft.ifft, ["spec_0"]),
-    Rule("spec_0", pulse.load_previous_spectrum, priorities=3),
+    Rule("spec_0", utils.load_previous_spectrum, priorities=3),
    Rule(
        ["pre_field_0", "peak_power", "energy", "width"],
-        pulse.load_field_file,
-        [
-            "field_file",
-            "t",
-            "peak_power",
-            "energy",
-            "intensity_noise",
-            "noise_correlation",
-            "quantum_noise",
-            "w_c",
-            "w0",
-            "time_window",
-            "dt",
-        ],
+        pulse.load_and_adjust_field_file,
+        ["field_file", "t", "peak_power", "energy", "intensity_noise", "noise_correlation"],
        priorities=[2, 1, 1, 1],
    ),
    Rule("pre_field_0", pulse.initial_field, priorities=1),
@@ -818,6 +1099,7 @@ default_rules: list[Rule] = [
    ),
    Rule("peak_power", pulse.E0_to_P0, ["energy", "t0", "shape"]),
    Rule("peak_power", pulse.soliton_num_to_peak_power),
+    Rule(["width", "peak_power", "energy"], pulse.measure_custom_field),
    Rule("energy", pulse.P0_to_E0, ["peak_power", "t0", "shape"]),
    Rule("energy", pulse.mean_power_to_energy),
    Rule("t0", pulse.width_to_t0),
@@ -874,55 +1156,14 @@ default_rules: list[Rule] = [
    Rule("gamma", lambda gamma_arr: gamma_arr[0]),
    Rule("gamma_arr", fiber.gamma_parameter, ["n2", "w0", "A_eff_arr"]),
    # Fiber loss
    Rule("alpha", fiber.compute_capillary_loss),
    Rule("alpha", fiber.load_custom_loss),
    Rule("alpha_arr", fiber.compute_capillary_loss),
    Rule("alpha_arr", fiber.load_custom_loss),
    Rule("alpha_arr", lambda alpha, t: np.ones_like(t) * alpha),
    # gas
    Rule("n_gas_2", materials.n_gas_2),
]


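# Minimal sketch of how the evaluator is assumed to consume these rules (inferred from
# the short forms above, not confirmed by this excerpt): when no explicit arg list is
# given, argument names are presumably taken from the function's signature, and higher
# `priorities` values are presumably tried first when several rules share a target:
#     Rule("t0", pulse.width_to_t0)             # args inferred, presumably ["width"]
#     Rule("spec_0", np.fft.fft, ["field_0"])   # explicit arg list
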
@dataclass
class BareConfig(Parameters):
    variable: dict = VariableParameter(Parameters)

    def __post_init__(self):
        # intentionally skip Parameters.__post_init__: a bare config may be incomplete
        pass

    @classmethod
    def load(cls, path: os.PathLike) -> "BareConfig":
        return cls(**io.load_toml(path))

    @classmethod
    def load_sequence(cls, *config_paths: os.PathLike) -> list["BareConfig"]:
        """Loads a sequence of configs

        Parameters
        ----------
        config_paths : os.PathLike
            either one path (the last config, containing a previous_config_file parameter)
            or a list of config paths in the order they have to be simulated

        Returns
        -------
        list[BareConfig]
            all loaded configs
        """
        if config_paths[0] is None:
            return []
        all_configs = [cls.load(config_paths[0])]
        if len(config_paths) == 1:
            while all_configs[0].previous_config_file is not None:
                all_configs.insert(0, cls.load(all_configs[0].previous_config_file))
        else:
            for i, path in enumerate(config_paths[1:]):
                all_configs.append(cls.load(path))
                all_configs[i + 1].previous_config_file = config_paths[i]
        return all_configs


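# Sketch (hypothetical paths): either pass the whole chain explicitly,
#     BareConfig.load_sequence("stage1.toml", "stage2.toml")
# or pass only the last file and let its previous_config_file entry (and those of its
# ancestors) rebuild the chain:
#     BareConfig.load_sequence("stage2.toml")
# Both return the configs in simulation order, earliest first.
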
if __name__ == "__main__":
    numero = type_checker(int)