disp and field from file, merging not done yet
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,32 +0,0 @@
-name = "full anomalous"
-
-[fiber]
-beta = [ -1.183e-26, 8.1038e-41, -9.5205e-56, 2.0737e-70, -5.3943e-85, 1.3486e-99, -2.5495e-114, 3.0524e-129, -1.714e-144,]
-gamma = 0.11
-length = 0.02
-model = "custom"
-input_transmission = 1.0
-
-[pulse]
-power = 10000
-t0 = 2.84e-14
-shape = "gaussian"
-quantum_noise = false
-intensity_noise = 0
-
-[simulation]
-dt = 1e-15
-parallel = true
-raman_type = "measured"
-repeat = 3
-t_num = 16384
-tolerated_error = 1e-9
-z_num = 64
-behaviors = [ "spm", "ss",]
-frep = 80000000.0
-lower_wavelength_interp_limit = 3e-7
-upper_wavelength_interp_limit = 1.9e-6
-ideal_gas = false
-
-[pulse.variable]
-wavelength = [ 8.35e-7, 8.3375e-7,]
@@ -75,18 +75,19 @@ def run_sim(args):
     method = prep_ray(args)
     configs = args.configs.copy()
     first_config = configs.pop(0)
+
     if args.appendto is None:
-        sim = new_simulations(first_config, args.id, method=method)
+        sim = new_simulations(first_config, method=method)
     else:
         sim = new_simulations(
-            first_config, args.id, data_folder=args.appendto, method=method, initial=False
+            first_config, prev_data_folder=args.appendto, method=method, initial=False
         )
     sim.run()
     data_folders = [sim.data_folder]
     for config in configs:
         print("launching", config)
         sim = new_simulations(
-            config, args.id, data_folder=data_folders[-1], method=method, initial=False
+            config, prev_data_folder=data_folders[-1], method=method, initial=False
         )
         sim.run()
         data_folders.append(sim.data_folder)
@@ -106,16 +106,21 @@ def capillary_nested(n):
 valid_param_types = dict(
     root=dict(
         name=lambda s: isinstance(s, str),
+        prev_data_dir=lambda s: isinstance(s, str),
     ),
     fiber=dict(
         input_transmission=in_range_incl(num, (0, 1)),
         gamma=num,
+        n2=num,
+        effective_mode_diameter=num,
+        A_eff=num,
         pitch=in_range_excl(num, (0, 1e-3)),
         pitch_ratio=in_range_excl(num, (0, 1)),
         core_radius=in_range_excl(num, (0, 1e-3)),
         he_mode=he_mode,
         fit_parameters=fit_parameters,
         beta=beta,
+        dispersion_file=lambda s: isinstance(s, str),
         model=string(["pcf", "marcatili", "marcatili_adjusted", "hasan", "custom"]),
         length=num,
         capillary_num=integer,
@@ -133,6 +138,7 @@ valid_param_types = dict(
     ),
     pulse=dict(
         field_0=field_0,
+        field_file=lambda s: isinstance(s, str),
         power=num,
         energy=num,
         soliton_num=num,
@@ -207,6 +213,6 @@ valid_variable = dict(
 )
 
 ENVIRON_KEY_BASE = "SCGENERATOR_"
-TMP_FOLDER_KEY_BASE = ENVIRON_KEY_BASE + "TMP_"
+TMP_FOLDER_KEY_BASE = ENVIRON_KEY_BASE + "SC_TMP_"
 PREFIX_KEY_BASE = ENVIRON_KEY_BASE + "PREFIX_"
 PARAM_SEPARATOR = " "
@@ -7,7 +7,7 @@ default_parameters = dict(
     name="no name",
     he_mode=(1, 1),
     fit_parameters=(0.08, 200e-9),
-    model="pcf",
+    model="custom",
     length=1,
     capillary_resonance_strengths=[],
     capillary_nested=0,
@@ -1,10 +1,13 @@
 import os
 from collections.abc import Mapping
-from typing import Any, Dict, Iterator, List, Tuple
+from typing import Any, Dict, Iterator, List, Set, Tuple
 
 import numpy as np
 from numpy import pi
+from numpy.core.numeric import full
+from scipy.interpolate.interpolate import interp1d
 from tqdm import tqdm
+from pathlib import Path
 
 from . import defaults, io, utils
 from .const import hc_model_specific_parameters, valid_param_types, valid_variable
@@ -42,19 +45,65 @@ class ParamSequence(Mapping):
 
 
 class ContinuationParamSequence(ParamSequence):
-    def __init__(self, folder: str, new_config: Dict[str, Any]):
-        self.path = folder
+    def __init__(self, prev_data_folder: str, new_config: Dict[str, Any]):
+        """Parameter sequence that builds on a previous simulation but with a new configuration
+
+        It is recommended that only the fiber and the number of points stored may be changed and
+        changing other parameters could results in unexpected behaviors. The new config doesn't have to
+        be a full configuration (specify only the new parameters).
+
+        Parameters
+        ----------
+        prev_data_folder : str
+            path to the folder of the previous simulation containing 'initial_config.toml'
+        new_config : Dict[str, Any]
+            new config
+        """
+        self.path = Path(prev_data_folder)
         init_config = io.load_previous_parameters(os.path.join(self.path, "initial_config.toml"))
-        new_config = utils.deep_update(init_config, new_config)
+
+        self.prev_variable_lists = [
+            (set(variable_list[1:]), self.path / utils.format_variable_list(variable_list))
+            for variable_list, _ in required_simulations(init_config)
+        ]
+
+        new_config = utils.override_config(init_config, new_config)
         super().__init__(new_config)
 
     def __iter__(self) -> Iterator[Tuple[List[Tuple[str, Any]], Dict[str, Any]]]:
         """iterates through all possible parameters, yielding a config as well as a flattened
         computed parameters set each time"""
         for variable_list, full_config in required_simulations(self.config):
-            sim_folder = os.path.join(self.path, utils.format_variable_list(variable_list))
-            yield variable_list, compute_subsequent_paramters(sim_folder, full_config, self.config)
+            prev_sim_folder = self.find_prev_data_folder(variable_list)
+            full_config["prev_data_dir"] = str(prev_sim_folder.resolve())
+
+            yield variable_list, compute_subsequent_paramters(prev_sim_folder, full_config)
 
+    def find_prev_data_folder(self, new_variable_list: List[Tuple[str, Any]]) -> Path:
+        """finds the previous simulation data that this new config should start from
+
+        Parameters
+        ----------
+        new_variable_list : List[Tuple[str, Any]]
+            as yielded by required_simulations
+
+        Returns
+        -------
+        Path
+            path to the data folder
+
+        Raises
+        ------
+        ValueError
+            no data folder found
+        """
+        to_test = set(new_variable_list[1:])
+        for old_v_list, path in self.prev_variable_lists:
+            if to_test.issuperset(old_v_list):
+                return path
+
+        raise ValueError(
+            f"cannot find a previous data folder for {new_variable_list} in {self.path}"
+        )
+
 
 class RecoveryParamSequence(ParamSequence):
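For clarity, an editor's illustration (not part of the commit) of the superset rule find_prev_data_folder applies: a previous data folder is reused when its stored variable list, minus the leading entry, is contained in the new one. The entries below, including the leading ("num", 0), are hypothetical:

prev_vars = {("wavelength", 8.35e-7)}                   # stored for one existing data folder
new_list = [("num", 0), ("wavelength", 8.35e-7), ("input_transmission", 0.9)]
if set(new_list[1:]).issuperset(prev_vars):             # first entry skipped, as in the code above
    print("reuse this previous data folder")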
@@ -244,7 +293,7 @@ def _contains(sub_conf, param):
     return param in sub_conf or param in sub_conf.get("variable", {})
 
 
-def _ensure_consistency_fiber(fiber):
+def _ensure_consistency_fiber(fiber: Dict[str, Any]):
     """ensure the fiber sub-dictionary of the parameter set is consistent
 
     Parameters
@@ -263,9 +312,17 @@ def _ensure_consistency_fiber(fiber):
         When at least one required parameter with no default is missing
     """
 
-    if _contains(fiber, "beta"):
+    if _contains(fiber, "beta") and not (
+        _contains(fiber, "n2") and _contains(fiber, "effective_mode_diameter")
+    ):
         fiber = defaults.get(fiber, "gamma", specified_parameters=["beta"])
-        fiber["model"] = fiber.get("model", "custom")
+        fiber.setdefault("model", "custom")
+
+    elif _contains(fiber, "dispersion_file") and not (
+        _contains(fiber, "n2") and _contains(fiber, "effective_mode_diameter")
+    ):
+        fiber = defaults.get(fiber, "gamma", specified_parameters=["dispersion_file"])
+        fiber.setdefault("model", "custom")
 
     else:
         fiber = defaults.get(fiber, "model")
@@ -335,9 +392,12 @@ def _ensure_consistency_pulse(pulse):
     MissingParameterError
         When at least one required parameter with no default is missing
     """
-    for param in ["wavelength", "shape", "quantum_noise", "intensity_noise"]:
+    for param in ["wavelength", "quantum_noise", "intensity_noise"]:
         pulse = defaults.get(pulse, param)
+
+    if not _contains(pulse, "field_file"):
+        pulse = defaults.get(pulse, "shape")
 
     if _contains(pulse, "soliton_num"):
         pulse = defaults.get_multiple(
             pulse, ["power", "energy", "width", "t0"], 1, specified_parameters=["soliton_num"]
@@ -461,7 +521,7 @@ def compute_init_parameters(config: Dict[str, Any]) -> Dict[str, Any]:
     logger = get_logger(__name__)
 
     # copy and flatten the config
-    params = dict(name=config["name"])
+    params = {k: v for k, v in config.items() if isinstance(v, (str, int, float))}
     for section in ["pulse", "fiber", "simulation", "gas"]:
         for key, value in config.get(section, {}).items():
             params[key] = value
@@ -481,9 +541,11 @@ def compute_init_parameters(config: Dict[str, Any]) -> Dict[str, Any]:
         params["lower_wavelength_interp_limit"],
     )
 
+    temp_gamma = None
+    if "effective_mode_diameter" in params:
+        params["A_eff"] = (params["effective_mode_diameter"] / 2) ** 2 * pi
     if "beta" in params:
         params["beta"] = np.array(params["beta"])
-        temp_gamma = 0
         params["dynamic_dispersion"] = False
     else:
         params["dynamic_dispersion"] = fiber.is_dynamic_dispersion(params)
@@ -503,6 +565,18 @@ def compute_init_parameters(config: Dict[str, Any]) -> Dict[str, Any]:
     params["hr_w"] = fiber.delayed_raman_w(params["t"], params["dt"], params["raman_type"])
 
     # PULSE
+    if "field_file" in params:
+        field_data = np.load(params["field_file"])
+        field_interp = interp1d(
+            field_data["time"], field_data["field"], bounds_error=False, fill_value=(0, 0)
+        )
+        params["field_0"] = field_interp(params["t"])
+        params = _comform_custom_field(params)
+    # Initial field
+    elif "field_0" in params:
+        params = _validate_custom_init_field(params)
+        params = _comform_custom_field(params)
+    else:
         params = _update_pulse_parameters(params)
         logger.info(f"computed initial N = {params['soliton_num']:.3g}")
 
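The field_file branch above calls np.load and indexes the result with "time" and "field", so a NumPy .npz archive holding those two arrays should satisfy it. A minimal sketch (file name and pulse values are invented):

import numpy as np

t = np.linspace(-0.5e-12, 0.5e-12, 4096)                     # 1 ps time window [s]
field = np.sqrt(10e3) * np.exp(-0.5 * (t / 2.84e-14) ** 2)   # hypothetical gaussian pulse
np.savez("my_field.npz", time=t, field=field)                # keys match field_data["time"] / ["field"]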
@@ -514,10 +588,6 @@ def compute_init_parameters(config: Dict[str, Any]) -> Dict[str, Any]:
     if "intensity_noise" in params:
         params = _technical_noise(params)
 
-    # Initial field
-    if "field_0" in params:
-        params = _validate_custom_init_field(params)
-    else:
         params["field_0"] = pulse.initial_field(
             params["t"], params["shape"], params["t0"], params["power"]
         )
@@ -532,22 +602,25 @@ def compute_init_parameters(config: Dict[str, Any]) -> Dict[str, Any]:
     return params
 
 
-def compute_subsequent_paramters(
-    sim_folder: str, init_config: Dict[str, Any], new_config: Dict[str, Any]
-) -> Dict[str, Any]:
-
-    init_config["fiber"] = new_config["fiber"]
-    init_config["simulation"]["z_num"] = new_config.get("simulation", init_config["simulation"])[
-        "z_num"
-    ]
-
-    params = compute_init_parameters(init_config)
+def compute_subsequent_paramters(sim_folder: str, config: Dict[str, Any]) -> Dict[str, Any]:
+    params = compute_init_parameters(config)
     params["spec_0"] = io.load_last_spectrum(sim_folder)[1]
     params["field_0"] = np.fft.ifft(params["spec_0"]) * params["input_transmission"]
 
     return params
 
 
+def _comform_custom_field(params):
+    params["field_0"] = params["field_0"] * pulse.modify_field_ratio(
+        params["field_o"], params.get("power"), params.get("intensity_noise")
+    )
+    params["width"], params["power"], params["energy"] = pulse.measure_field(
+        params["t"], params["field_0"]
+    )
+    return params
+
+
 def _update_pulse_parameters(params):
     (
         params["width"],
@@ -568,10 +641,11 @@ def _update_pulse_parameters(params):
 
 
 def _validate_custom_init_field(params):
-    if isinstance(params["field_0"], str):
-        field_0 = evaluate_field_equation(params["field_0"], **params)
+    field_info = params["field_0"]
+    if isinstance(field_info, str):
+        field_0 = evaluate_field_equation(field_info, **params)
         params["field_0"] = field_0
-    elif len(params["field_0"]) != params["t_num"]:
+    elif len(field_info) != params["t_num"]:
         raise ValueError(
             "initial field is given but doesn't match size and type with the time array"
         )
@@ -2,7 +2,7 @@ import os
 import shutil
 from datetime import datetime
 from glob import glob
-from typing import Any, Dict, Iterable, List, Tuple
+from typing import Any, Dict, Iterable, List, Tuple, Union
 
 import numpy as np
 from numpy.lib import delete
@@ -127,8 +127,9 @@ class DataBuffer:
 #     return os.path.normpath(p)
 
 
-def load_toml(path: str):
+def load_toml(path: os.PathLike):
     """returns a dictionary parsed from the specified toml file"""
+    path = str(path)
     if not path.lower().endswith(".toml"):
         path += ".toml"
     with open(path, mode="r") as file:
@@ -365,24 +366,21 @@ def load_last_spectrum(path: str) -> Tuple[int, np.ndarray]:
     return num, np.load(os.path.join(path, f"spectrum_{num}.npy"))
 
 
-def merge(paths: List[str]):
+def merge(paths: Union[str, List[str]]):
+    if isinstance(paths, str):
+        paths = [paths]
     for path in paths:
         merge_same_simulations(path, delete=False)
 
-    if len(paths) < 2:
-        return
-
-    append_simulations(paths)
-
 
 def append_simulations(paths: List[os.PathLike]):
     paths: List[Path] = [Path(p).resolve() for p in paths]
-    master_sim = paths[0]
-    merged_path = master_sim.parent / "merged_sims"
+    master_sim_path = paths[-1]
+    merged_path = master_sim_path.parent / "merged_sims"
     merged_path.mkdir(exist_ok=True)
     for i, path in enumerate(paths):
         shutil.copy(path / "initial_config.toml", merged_path / f"initial_config{i}.toml")
-    for sim in master_sim.glob("*"):
+    for sim in master_sim_path.glob("*"):
         if not sim.is_dir() or not str(sim).endswith("merged"):
             continue
         sim_name = sim.name
@@ -406,6 +404,26 @@ def append_simulations(paths: List[os.PathLike]):
     np.save(merge_sim_path / "z.npy", np.concatenate(z))
 
 
+def append_and_merge(final_sim_path: os.PathLike, new_name=None):
+    final_sim_path = Path(final_sim_path).resolve()
+    if new_name is None:
+        new_name = final_sim_path.name + " appended"
+
+    appended_path = final_sim_path.parent / new_name
+    appended_path.mkdir(exist_ok=True)
+
+    for sim_path in final_sim_path.glob("id*num*"):
+        path_tree = [sim_path]
+        sim_name = sim_path.name
+        appended_sim_path = appended_path / sim_name
+        appended_sim_path.mkdir(exist_ok=True)
+
+        while (prev_sim_path := load_toml(path_tree[-1] / "params.toml")).get(
+            "prev_sim_dir"
+        ) is not None:
+            path_tree.append(Path(prev_sim_path).resolve())
+
+
 def merge_same_simulations(path: str, delete=True):
     logger = get_logger(__name__)
     num_separator = PARAM_SEPARATOR + "num" + PARAM_SEPARATOR
@@ -475,6 +493,8 @@ def get_data_folder(task_id: int, name_if_new: str = "data"):
|
|||||||
if tmp is None:
|
if tmp is None:
|
||||||
tmp = ensure_folder("scgenerator " + name_if_new)
|
tmp = ensure_folder("scgenerator " + name_if_new)
|
||||||
os.environ[TMP_FOLDER_KEY_BASE + idstr] = tmp
|
os.environ[TMP_FOLDER_KEY_BASE + idstr] = tmp
|
||||||
|
elif not os.path.exists(tmp):
|
||||||
|
os.mkdir(tmp)
|
||||||
return tmp
|
return tmp
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
 import numpy as np
+from numpy.lib import disp
 from numpy.lib.arraysetops import isin
 import toml
 from numba import jit
@@ -85,14 +86,14 @@ def dispersion_parameter(n_eff, lambda_):
     return -lambda_ / c * (np.gradient(np.gradient(n_eff, lambda_), lambda_))
 
 
-def beta2_to_D(beta2, lambda_):
-    """returns the beta2 parameters corresponding to D(lambda_)"""
-    return -(2 * pi * c) / (lambda_ ** 2) * beta2
+def beta2_to_D(beta2, λ):
+    """returns the D parameter corresponding to beta2(λ)"""
+    return -(2 * pi * c) / (λ ** 2) * beta2
 
 
-def D_to_beta2(D, lambda_):
-    """returns the D parameter corresponding to beta2(lambda_)"""
-    return -(lambda_ ** 2) / (2 * pi * c) * D
+def D_to_beta2(D, λ):
+    """returns the beta2 parameters corresponding to D(λ)"""
+    return -(λ ** 2) / (2 * pi * c) * D
 
 
 def plasma_dispersion(lambda_, number_density, simple=False):
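The swapped docstrings above now match the formulas: the two helpers implement the standard conversion between the group-velocity dispersion β2 and the dispersion parameter D, namely D(λ) = -(2πc/λ²)·β2 and β2 = -(λ²/(2πc))·D, so they are exact inverses of each other.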
@@ -524,7 +525,7 @@ def dynamic_HCPCF_dispersion(lambda_, params, material_dico, deg):
     n2 = lambda r: mat.non_linear_refractive_index(material_dico, pressure(r), temp)
     ratio_range = np.linspace(0, 1, 256)
 
-    gamma_grid = np.array([n2(r) * w0 / (A_eff * c) for r in ratio_range])
+    gamma_grid = np.array([gamma_parameter(n2(r), w0, A_eff) for r in ratio_range])
     gamma_interp = interp1d(ratio_range, gamma_grid)
 
     beta2_grid = np.array(
@@ -543,7 +544,11 @@ def dynamic_HCPCF_dispersion(lambda_, params, material_dico, deg):
     return beta2_func, gamma_func
 
 
-def PCF_dispersion(lambda_, pitch, ratio_d, w0=None):
+def gamma_parameter(n2, w0, A_eff):
+    return n2 * w0 / (A_eff * c)
+
+
+def PCF_dispersion(lambda_, pitch, ratio_d, w0=None, n2=None, A_eff=None):
     """
     semi-analytical computation of the dispersion profile of a triangular Index-guiding PCF
 
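The new gamma_parameter helper centralizes the definition of the nonlinear coefficient used in several places below: γ = n2·ω0/(A_eff·c), which with ω0 = 2πc/λ0 is the usual γ = 2π·n2/(λ0·A_eff).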
@@ -623,12 +628,14 @@ def PCF_dispersion(lambda_, pitch, ratio_d, w0=None, n2=None, A_eff=None):
 
     else:
         # effective mode field area (koshiba2004)
+        if A_eff is None:
             V_eff = pi2a / lambda_ * np.sqrt(n_co ** 2 - n_FSM2)
             w_eff = a_eff * (0.65 + 1.619 / V_eff ** 1.5 + 2.879 / V_eff ** 6)
             A_eff = interp1d(lambda_, w_eff, kind="linear")(units.m.inv(w0)) ** 2 * pi
 
-        n2 = 2.6e-20  # FIXME
-        gamma = n2 * w0 / (A_eff * c)
+        if n2 is None:
+            n2 = 2.6e-20
+        gamma = gamma_parameter(n2, w0, A_eff)
 
     return beta2, gamma
 
@@ -652,9 +659,16 @@ def dispersion_central(fiber_model, params, deg=8):
     gamma : float
         nonlinear parameter
     """
 
+    if "dispersion_file" in params:
+        disp_file = np.load(params["dispersion_file"])
+        lambda_ = disp_file["wavelength"]
+        D = disp_file["dispersion"]
+        beta2 = D_to_beta2(D, lambda_)
+        gamma = None
+    else:
         lambda_ = lambda_for_dispersion()
         beta2 = np.zeros_like(lambda_)
 
     fiber_model = fiber_model.lower()
 
     if fiber_model == "pcf":
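Like the field file, the dispersion_file branch indexes the loaded object with "wavelength" and "dispersion", so an .npz archive with those two arrays (D sampled on a wavelength grid) should work. A sketch with invented values, units assumed to be SI:

import numpy as np

wl = np.linspace(400e-9, 1600e-9, 512)    # wavelength grid [m]
D = np.linspace(-100e-6, 40e-6, 512)      # hypothetical D(λ) values [s/m^2]
np.savez("my_dispersion.npz", wavelength=wl, dispersion=D)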
@@ -663,6 +677,8 @@ def dispersion_central(fiber_model, params, deg=8):
             params["pitch"],
             params["pitch_ratio"],
             w0=params["w0"],
+            n2=params.get("n2"),
+            A_eff=params.get("A_eff"),
         )
 
     else:
@@ -696,9 +712,9 @@ def dispersion_central(fiber_model, params, deg=8):
             n2 = mat.non_linear_refractive_index(
                 material_dico, params["pressure"], params["temperature"]
             )
-            gamma = n2 * params["w0"] / (A_eff * c)
+            gamma = gamma_parameter(n2, params["w0"], A_eff)
         else:
-            gamma = 0
+            gamma = None
 
     # add plasma if wanted
     if params["plasma_density"] > 0:
@@ -706,6 +722,12 @@ def dispersion_central(fiber_model, params, deg=8):
 
     beta2_coef = dispersion_coefficients(lambda_, beta2, params["w0"], params["interp_range"], deg)
 
+    if gamma is None:
+        if "A_eff" in params:
+            gamma = gamma_parameter(params.get("n2", 2.6e-20), params["w0"], params["A_eff"])
+        else:
+            gamma = 0
+
     return beta2_coef, gamma
 
 
@@ -11,6 +11,7 @@ n is the number of spectra at the same z position and nt is the size of the time
 
 import itertools
 import os
+from typing import Tuple
 
 import matplotlib.pyplot as plt
 import numpy as np
@@ -76,6 +77,34 @@ def initial_field(t, shape, t0, power):
         raise ValueError(f"shape '{shape}' not understood")
 
 
+def modify_field_ratio(
+    field: np.ndarray, target_power: float = None, intensity_noise: float = None
+) -> float:
+    """multiply a field by this number to get the desired effects
+
+    Parameters
+    ----------
+    field : np.ndarray
+        initial field
+    target_power : float, optional
+        abs2(field).max() == target_power, by default None
+    intensity_noise : float, optional
+        intensity noise, by default None
+
+    Returns
+    -------
+    float
+        ratio (multiply field by this number)
+    """
+    ratio = 1
+    if target_power is not None:
+        ratio *= np.sqrt(target_power / abs2(field).max())
+    if intensity_noise is not None:
+        d_int, _ = technical_noise(intensity_noise)
+        ratio *= np.sqrt(d_int)
+    return ratio
+
+
 def conform_pulse_params(
     shape,
     width=None,
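A small usage sketch (not from the commit) of modify_field_ratio, rescaling a custom field to a requested peak power:

t = np.linspace(-1e-12, 1e-12, 2048)
field = np.exp(-0.5 * (t / 2.84e-14) ** 2)                 # arbitrary unit-amplitude field
field = field * modify_field_ratio(field, target_power=1e4)
# abs2(field).max() is now ~1e4; passing intensity_noise adds the sqrt(d_int) factor as well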
@@ -793,3 +822,12 @@ def measure_properties(spectra, t, compress=True, debug=""):
|
|||||||
t_jitter = np.std(t_offset)
|
t_jitter = np.std(t_offset)
|
||||||
|
|
||||||
return qf, mean_g12, fwhm_var, fwhm_abs, int_var, t_jitter
|
return qf, mean_g12, fwhm_var, fwhm_abs, int_var, t_jitter
|
||||||
|
|
||||||
|
|
||||||
|
def measure_field(t: np.ndarray, field: np.ndarray) -> Tuple[float, float, float]:
|
||||||
|
intensity = abs2(field)
|
||||||
|
_, fwhm_lim, _, _ = find_lobe_limits(t, intensity)
|
||||||
|
fwhm = length(fwhm_lim)
|
||||||
|
power = intensity.max()
|
||||||
|
energy = np.trapz(intensity, t)
|
||||||
|
return fwhm, power, energy
|
||||||
|
|||||||
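measure_field reduces a time-domain field to three scalars, which _comform_custom_field stores back into the parameter dict; usage sketch:

width, power, energy = measure_field(t, field)   # FWHM of the main lobe, peak power, pulse energy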
@@ -166,6 +166,7 @@ class RK4IP:
         """
         self._save_data(self.current_spectrum, f"spectrum_{num}")
         self._save_data(self.cons_qty, f"cons_qty")
+        self.step_saved()
 
     def _save_data(self, data: np.ndarray, name: str):
         """calls the appropriate method to save data
@@ -211,7 +212,6 @@ class RK4IP:
             self._save_current_spectrum(len(self.stored_spectra) - 1)
 
         self.z_stored.append(self.z)
-        self.step_saved()
         del self.z_targets[0]
 
         # reset the constant step size after a spectrum is stored
@@ -315,6 +315,8 @@ class MutliProcRK4IP(RK4IP):
         task_id=0,
         n_percent=10,
     ):
+        self.worker_id = worker_id
+        self.p_queue = p_queue
         super().__init__(
             sim_params,
             save_data=save_data,
@@ -322,8 +324,6 @@ class MutliProcRK4IP(RK4IP):
             task_id=task_id,
             n_percent=n_percent,
         )
-        self.worker_id = worker_id
-        self.p_queue = p_queue
 
     def step_saved(self):
         self.p_queue.put((self.worker_id, self.z / self.z_final))
@@ -340,6 +340,8 @@ class RayRK4IP(RK4IP):
         task_id=0,
         n_percent=10,
     ):
+        self.worker_id = worker_id
+        self.p_actor = p_actor
         super().__init__(
             sim_params,
             save_data=save_data,
@@ -347,8 +349,6 @@ class RayRK4IP(RK4IP):
             task_id=task_id,
             n_percent=n_percent,
         )
-        self.worker_id = worker_id
-        self.p_actor = p_actor
 
     def step_saved(self):
         self.p_actor.update.remote(self.worker_id, self.z / self.z_final)
@@ -361,11 +361,13 @@ class Simulations:
     """
 
     _available_simulation_methods = []
+    _available_simulation_methods_dict: Dict[str, Type["Simulations"]] = dict()
 
     def __init_subclass__(cls, available: bool, priority=0, **kwargs):
         cls._available = available
         if available:
             Simulations._available_simulation_methods.append((cls, priority))
+            Simulations._available_simulation_methods_dict[cls.__name__] = cls
         Simulations._available_simulation_methods.sort(key=lambda el: el[1])
         super().__init_subclass__(**kwargs)
 
@@ -373,7 +375,7 @@ class Simulations:
     def get_best_method(cls):
         return Simulations._available_simulation_methods[-1][0]
 
-    def __init__(self, param_seq: initialize.ParamSequence, task_id=0, data_folder="scgenerator/"):
+    def __init__(self, param_seq: initialize.ParamSequence, task_id=0):
         """
         Parameters
         ----------
@@ -466,8 +468,8 @@ class SequencialSimulations(Simulations, available=True, priority=0):
 
 
 class MultiProcSimulations(Simulations, available=True, priority=10):
-    def __init__(self, param_seq: initialize.ParamSequence, task_id, data_folder):
-        super().__init__(param_seq, task_id=task_id, data_folder=data_folder)
+    def __init__(self, param_seq: initialize.ParamSequence, task_id):
+        super().__init__(param_seq, task_id=task_id)
         self.sim_jobs_per_node = max(1, os.cpu_count() // 2)
         self.queue = multiprocessing.JoinableQueue(self.sim_jobs_per_node)
         self.progress_queue = multiprocessing.Queue()
@@ -559,9 +561,8 @@ class RaySimulations(Simulations, available=using_ray, priority=2):
         self,
         param_seq: initialize.ParamSequence,
         task_id=0,
-        data_folder="scgenerator/",
     ):
-        super().__init__(param_seq, task_id, data_folder)
+        super().__init__(param_seq, task_id)
 
         nodes = ray.nodes()
         self.logger.info(
@@ -661,21 +662,21 @@ class RaySimulations(Simulations, available=using_ray, priority=2):
 
 def new_simulations(
     config_file: str,
-    task_id: int,
-    data_folder="scgenerator/",
+    prev_data_folder=None,
     method: Type[Simulations] = None,
-    initial=True,
 ) -> Simulations:
 
     config = io.load_toml(config_file)
-    if initial:
+    task_id = np.random.randint(1e9, 1e12)
+
+    if prev_data_folder is None:
         param_seq = initialize.ParamSequence(config)
     else:
-        param_seq = initialize.ContinuationParamSequence(data_folder, config)
+        param_seq = initialize.ContinuationParamSequence(prev_data_folder, config)
 
     print(f"{param_seq.name=}")
 
-    return _new_simulations(param_seq, task_id, data_folder, method)
+    return _new_simulations(param_seq, task_id, method)
 
 
 def resume_simulations(
@@ -686,21 +687,22 @@ def resume_simulations(
     io.set_data_folder(task_id, data_folder)
     param_seq = initialize.RecoveryParamSequence(config, task_id)
 
-    return _new_simulations(param_seq, task_id, data_folder, method)
+    return _new_simulations(param_seq, task_id, method)
 
 
 def _new_simulations(
     param_seq: initialize.ParamSequence,
     task_id,
-    data_folder,
     method: Type[Simulations],
 ) -> Simulations:
     if method is not None:
-        return method(param_seq, task_id, data_folder=data_folder)
+        if isinstance(method, str):
+            method = Simulations._available_simulation_methods_dict[method]
+        return method(param_seq, task_id)
     elif param_seq.num_sim > 1 and param_seq["simulation", "parallel"] and using_ray:
-        return Simulations.get_best_method()(param_seq, task_id, data_folder=data_folder)
+        return Simulations.get_best_method()(param_seq, task_id)
     else:
-        return SequencialSimulations(param_seq, task_id, data_folder=data_folder)
+        return SequencialSimulations(param_seq, task_id)
 
 
 if __name__ == "__main__":
@@ -11,7 +11,7 @@ import itertools
 import logging
 import re
 import socket
-from typing import Any, Callable, Iterator, List, Mapping, Tuple, Union
+from typing import Any, Callable, Dict, Iterator, List, Mapping, Tuple, Union
 from asyncio import Event
 
 import numpy as np
@@ -347,6 +347,34 @@ def deep_update(d: Mapping, u: Mapping):
     return d
 
 
+def override_config(old: Dict[str, Any], new: Dict[str, Any]) -> Dict[str, Any]:
+    out = deepcopy(old)
+    for section_name, section in new.items():
+        if isinstance(section, Mapping):
+            for param_name, value in section.items():
+                if param_name == "variable" and isinstance(value, Mapping):
+                    out[section_name].setdefault("variable", {})
+                    for p, v in value.items():
+                        # override previously unvariable param
+                        if p in old[section_name]:
+                            del out[section_name][p]
+                        out[section_name]["variable"][p] = v
+                else:
+                    # override previously variable param
+                    if (
+                        "variable" in old[section_name]
+                        and isinstance(old[section_name]["variable"], Mapping)
+                        and param_name in old[section_name]["variable"]
+                    ):
+                        del out[section_name]["variable"][param_name]
+                        if len(out[section_name]["variable"]) == 0:
+                            del out[section_name["variable"]]
+                    out[section_name][param_name] = value
+        else:
+            out[section_name] = section
+    return out
+
+
 def formatted_hostname():
     s = socket.gethostname().replace(".", "_")
     return (PREFIX_KEY_BASE + s).upper()
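To make the merge rule concrete, a small illustration (values invented, behavior per override_config above and the test added below): a parameter that was fixed in the old config and is listed under "variable" in the new one moves into the variable sub-dict.

old = {"fiber": {"gamma": 0.11, "input_transmission": 1.0}}
new = {"fiber": {"gamma": 0.13, "variable": {"input_transmission": [0.9, 0.95]}}}
out = override_config(old, new)
# out["fiber"] -> {"gamma": 0.13, "variable": {"input_transmission": [0.9, 0.95]}}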
testing/configs/override/fiber2.toml (new file, 13 lines)
@@ -0,0 +1,13 @@
+name = "fiber 2"
+
+[fiber]
+beta = [-1.183e-26, 8.1038e-41, -9.5205e-56, 2.0737e-70, -5.3943e-85, 1.3486e-99, -2.5495e-114, 3.0524e-129, -1.714e-144]
+gamma = 0.13
+length = 0.05
+model = "custom"
+
+[fiber.variable]
+input_transmission = [0.9, 0.95]
+
+[simulation]
+z_num = 16
@@ -11,9 +11,12 @@ def load_conf(name):
     return conf
 
 
-def conf_maker(folder):
+def conf_maker(folder, val=True):
     def conf(name):
+        if val:
             return initialize.validate(load_conf(folder + "/" + name))
+        else:
+            return load_conf(folder + "/" + name)
 
     return conf
 
@@ -50,6 +53,15 @@ class TestUtilsMethods(unittest.TestCase):
         for value, target in zip(values, s):
             self.assertEqual(target, utils.format_value(value))
 
+    def test_override_config(self):
+        conf = conf_maker("override", False)
+        old = conf("initial_config")
+        new = conf("fiber2")
+
+        over = utils.override_config(old, new)
+        self.assertIn("input_transmission", over["fiber"]["variable"])
+        self.assertNotIn("input_transmission", over["fiber"])
+
 
 if __name__ == "__main__":
     unittest.main()