remove tomli dependency

This commit is contained in:
Benoît Sierro
2023-09-26 09:56:29 +02:00
parent b986baa640
commit ce24c1ff38
2 changed files with 6 additions and 101 deletions
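Context note (not part of the commit message): `tomllib` entered the Python standard library in 3.11 with the same read-only API as `tomli`, which is why `requires-python` is bumped from 3.10 to 3.11 and only the TOML-writing helpers (the `tomli_w`-based code below) are deleted rather than ported. A minimal sketch of the swap, where "config.toml" is a hypothetical file and not one from this repository:

# sketch only: reading TOML with the stdlib instead of tomli
import tomllib  # standard library since Python 3.11; reading API mirrors tomli

with open("config.toml", "rb") as file:  # tomllib, like tomli, requires binary mode
    config = tomllib.load(file)
print(config)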

View File

@@ -13,14 +13,12 @@ classifiers = [
     "License :: OSI Approved :: MIT",
     "Programming Language :: Python :: 3",
 ]
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 keywords = ["nonlinear", "fiber optics", "simulation", "runge-kutta"]
 dependencies = [
     "numpy",
     "scipy",
     "matplotlib",
-    "tomli",
-    "tomli_w",
     "numba",
     "tqdm",
     "pydantic",

View File

@@ -9,18 +9,16 @@ import itertools
 import json
 import os
 import re
-from collections import defaultdict
+import tomllib
 from functools import cache, lru_cache
 from pathlib import Path
 from string import printable as str_printable
 from typing import Any, Callable, MutableMapping, Sequence, TypeVar, Union
 
 import numpy as np
-import tomli
-import tomli_w
 
 from scgenerator import io
-from scgenerator.const import PARAM_FN, PARAM_SEPARATOR, SPEC1_FN, Z_FN
+from scgenerator.const import PARAM_FN, PARAM_SEPARATOR, SPEC1_FN
 from scgenerator.logger import get_logger
 
 T_ = TypeVar("T_")
@@ -111,15 +109,15 @@ def load_toml(descr: os.PathLike) -> dict[str, Any]:
     if ":" in descr:
         path, entry = descr.split(":", 1)
         with open(path, "rb") as file:
-            return tomli.load(file)[entry]
+            return tomllib.load(file)[entry]
     else:
         with open(descr, "rb") as file:
-            return tomli.load(file)
+            return tomllib.load(file)
 
 
 def load_flat(descr: os.PathLike) -> dict[str, Any]:
     with open(descr, "rb") as file:
-        d = tomli.load(file)
+        d = tomllib.load(file)
     if "Fiber" in d:
         for fib in d["Fiber"]:
             for k, v in fib.items():
@@ -128,14 +126,6 @@ def load_flat(descr: os.PathLike) -> dict[str, Any]:
     return d
 
 
-def save_toml(path: os.PathLike, dico):
-    """saves a dictionary into a toml file"""
-    path = conform_toml_path(path)
-    with open(path, mode="wb") as file:
-        tomli_w.dump(dico, file)
-    return dico
-
-
 @cache
 def load_material_dico(name: str) -> dict[str, Any]:
     """
@@ -277,89 +267,6 @@ def fft_functions(
     return np.fft.fft, np.fft.ifft
 
 
-def combine_simulations(path: Path, dest: Path = None):
-    """
-    combines raw simulations into one folder per branch
-
-    Parameters
-    ----------
-    path : Path
-        source of the simulations (must contain u_xx directories)
-    dest : Path, optional
-        if given, moves the simulations to dest, by default None
-    """
-    paths: dict[str, list[Path]] = defaultdict(list)
-    if dest is None:
-        dest = path
-    for p in path.glob("u_*b_*"):
-        if p.is_dir():
-            paths[p.name.split()[1]].append(p)
-    for l in paths.values():
-        try:
-            l.sort(key=lambda el: re.search(r"(?<=num )[0-9]+", el.name)[0])
-        except TypeError:
-            pass
-    for pulses in paths.values():
-        new_path = dest / update_path_name(pulses[0].name)
-        os.makedirs(new_path, exist_ok=True)
-        for num, pulse in enumerate(pulses):
-            params_ok = False
-            for file in pulse.glob("*"):
-                if file.name == PARAM_FN:
-                    if not params_ok:
-                        update_params(new_path, file)
-                        params_ok = True
-                    else:
-                        file.unlink()
-                elif file.name == Z_FN:
-                    file.rename(new_path / file.name)
-                elif file.name.startswith("spectr") and num == 0:
-                    file.rename(new_path / file.name)
-                else:
-                    file.rename(new_path / (file.stem + f"_{num}" + file.suffix))
-            pulse.rmdir()
-
-
-def update_params(new_path: Path, file: Path):
-    params = load_toml(file)
-    if (p := params.get("prev_data_dir")) is not None:
-        p = Path(p)
-        params["prev_data_dir"] = str(Path("../..") / p.parent.name / update_path_name(p.name))
-    params["output_path"] = str(new_path)
-    save_toml(new_path / PARAM_FN, params)
-    file.unlink()
-
-
-def save_parameters(
-    params: dict[str, Any], destination_dir: Path, file_name: str = PARAM_FN
-) -> Path:
-    """
-    saves a parameter dictionary. Note that is does remove some entries, particularly
-    those that take a lot of space ("t", "w", ...)
-
-    Parameters
-    ----------
-    params : dict[str, Any]
-        dictionary to save
-    destination_dir : Path
-        destination directory
-
-    Returns
-    -------
-    Path
-        path to newly created the paramter file
-    """
-    file_path = destination_dir / file_name
-    os.makedirs(file_path.parent, exist_ok=True)
-
-    # save toml of the simulation
-    with open(file_path, "wb") as file:
-        tomli_w.dump(params, file)
-
-    return file_path
-
-
 def update_path_name(p: str) -> str:
     return re.sub(r"( ?num [0-9]+)|(u_[0-9]+ )", "", p)
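For reference, a self-contained restatement of the `load_toml` pattern this commit keeps (only the stdlib import changes). The surrounding scgenerator module (logging, caching, other helpers) is omitted, and the commented-out call at the bottom is a hypothetical usage, not a file from this repository:

import os
import tomllib
from typing import Any


def load_toml(descr: os.PathLike | str) -> dict[str, Any]:
    # "path:entry" selects a single top-level table; a plain path returns the whole file
    descr = str(descr)
    if ":" in descr:
        path, entry = descr.split(":", 1)
        with open(path, "rb") as file:
            return tomllib.load(file)[entry]
    with open(descr, "rb") as file:
        return tomllib.load(file)


# hypothetical usage: read only the [pulse] table of params.toml
# pulse = load_toml("params.toml:pulse")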