Skip to content

Commit

Permalink
Replace configsuite with pydantic
Browse files Browse the repository at this point in the history
  • Loading branch information
oyvindeide committed Jan 25, 2024
1 parent 0125544 commit dad824c
Show file tree
Hide file tree
Showing 8 changed files with 610 additions and 303 deletions.
Empty file added semeio/_docs_utils/__init__.py
Empty file.
102 changes: 102 additions & 0 deletions semeio/_docs_utils/_json_schema_2_rst.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
from copy import deepcopy


def _insert_ref(schema, defs):
for key, value in schema.items():
if isinstance(value, dict):
_insert_ref(value, defs)
elif isinstance(value, list):
for index, val in enumerate(value.copy()):
if isinstance(val, int):
val = str(val)
if "$ref" in val:
value[index] = defs[val["$ref"]].pop("properties")


def _remove_key(schema, del_key):
schema_copy = schema.copy()
for key, val in schema_copy.items():
if key == del_key:
del schema[key]
elif isinstance(val, dict):
_remove_key(schema[key], del_key)


def _replace_key(schema, old_key, new_key):
schema_copy = schema.copy()
for key, val in schema_copy.items():
if key == old_key:
del schema[key]
schema[new_key] = val
key = new_key
if isinstance(val, dict):
_replace_key(schema[key], old_key, new_key)


def _create_docs(schema: dict):
    """Render a pydantic-generated JSON schema as an rst documentation string.

    While there are alternatives to implementing something new, most of these
    rely on a sphinx directive, which we can not easily use here. There are
    also libraries such as jsonschema2rst, but they only document their
    command line interface, and the result is not immediately valid rst,
    and require some post-processing. If a good alternative is found,
    this could be removed."""
    schema = deepcopy(schema)
    schema.pop("type")
    _remove_key(schema, "title")
    # Rename json-schema keywords so the rendered text reads as prose.
    # Order matters for "anyOf"/"enum": both map to the same label.
    for json_keyword, prose_label in (
        ("anyOf", "must be one of"),
        ("enum", "must be one of"),
        ("allOf", "must be"),
        ("minItems", "minimum length"),
        ("maxItems", "maximum length"),
    ):
        _replace_key(schema, json_keyword, prose_label)
    defs = schema.pop("$defs")
    required = schema.pop("required", [])
    _insert_ref(schema, defs)
    rendered = _make_documentation(schema.pop("properties"), required=required)
    # Optional "str | None" fields expand into a noisy two-type anyOf;
    # strip that boilerplate from the output.
    return rendered.replace(
        " **must be one of**:\n\n **type**: string\n\n **type**: null\n\n",
        "",
    )


def _make_documentation(
schema,
required=None,
level=0,
preface="",
element_seperator="\n\n",
):
indent = level * 2 * " "
docs = []
required = required if required is not None else []
if isinstance(schema, dict):
for key, val in schema.items():
if key == "default" and not val:
continue
if key == "description":
docs += [indent + val.replace("\n", " ")]
elif key == "examples":
docs += [indent + f".. code-block:: yaml\n{val}\n\n"]
elif isinstance(val, dict):
if key in required and level == 0:
key += "*"
docs += [indent + f"**{key}**:"]
docs += [_make_documentation(val, level=level + 1)]
elif isinstance(val, list):
docs += [indent + f"**{key}**:"]
docs += [_make_documentation(val, level=level + 1)]
else:
docs += [indent + f"**{key}**: {val}"]
elif isinstance(schema, list):
list_docs = []
for element in schema:
list_docs += [
_make_documentation(
element, level=level + 1, preface=" ", element_seperator="\n"
)
]
docs += list_docs
else:
schema = schema if isinstance(schema, str) else str(schema)
docs += [indent + preface + schema]
return element_seperator.join(docs)
11 changes: 11 additions & 0 deletions semeio/forward_models/overburden_timeshift/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from .ots_config import OTSConfig
from .ots_vel_surface import OTSVelSurface
from .ots_res_surface import OTSResSurface
from .ots import ots_run

__all__ = [
"ots_run",
"OTSConfig",
"OTSVelSurface",
"OTSResSurface",
]
39 changes: 13 additions & 26 deletions semeio/forward_models/overburden_timeshift/ots.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,46 +6,33 @@
from itertools import product
from pathlib import Path

import configsuite
import numpy as np
import xtgeo
import yaml
from pydantic import ValidationError
from resdata.gravimetry import ResdataSubsidence
from resdata.grid import Grid
from resdata.resfile import Resdata3DKW, ResdataFile
from scipy.interpolate import CloughTocher2DInterpolator

from semeio._exceptions.exceptions import ConfigurationError
from semeio.forward_models.overburden_timeshift.ots_config import build_schema
from semeio.forward_models.overburden_timeshift.ots_res_surface import OTSResSurface
from semeio.forward_models.overburden_timeshift.ots_vel_surface import OTSVelSurface


def extract_ots_context(configuration):
    """Return the report dates found in the simulation restart file.

    Builds the path ``<eclbase>.UNRST`` from the configuration's ``eclbase``
    attribute and reads the dates out of it; returns an empty list when the
    restart file does not exist (presumably so validation can proceed
    without dates — confirm against the configsuite callers).
    """
    rstfile_path = Path(f"{configuration.eclbase}.UNRST")
    if not rstfile_path.exists():
        return []
    # ResdataFile.dates yields datetimes; keep only the date part.
    dates = [d.date() for d in ResdataFile(str(rstfile_path)).dates]
    return dates
from semeio.forward_models.overburden_timeshift import (
OTSConfig,
OTSResSurface,
OTSVelSurface,
)


def ots_load_params(input_file):
    """Load and validate an overburden-timeshift YAML configuration file.

    :param input_file: path to the YAML configuration file.
    :return: a validated ``OTSConfig`` instance.
    :raises ConfigurationError: if pydantic validation of the parsed YAML
        fails; the original ``ValidationError`` is chained as the cause.
    """
    try:
        with open(input_file, "r", encoding="utf-8") as fin:
            config = OTSConfig(**yaml.safe_load(fin))
    except ValidationError as err:
        raise ConfigurationError(
            f"Invalid configuration for config file: {input_file}",
            err,
        ) from err
    return config


def write_surface(vintage_pairs, ts, output_dir, type_str, file_format="irap_binary"):
Expand Down
Loading

0 comments on commit dad824c

Please sign in to comment.