Skip to content

Commit

Permalink
resolved comments of bernardo
Browse files Browse the repository at this point in the history
  • Loading branch information
mpvanderschelling committed Nov 12, 2023
1 parent e6160d6 commit 2d03be1
Show file tree
Hide file tree
Showing 4 changed files with 159 additions and 19 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ A reference (:code:`Path`) will be saved to the :attr:`~f3dasm.design.Experiment
└── jobs.pkl
In the :attr:`~f3dasm.design.ExperimentData.output_data`, a reference to the stored object (e.g. :code:`/output_numpy/0.npy`) will be automatically appended to the parameter.
In the output data of the :class:`~f3dasm.design.experimentdata.ExperimentData` object, a reference path (e.g. :code:`/output_numpy/0.npy`) to the stored object will be saved.


:mod:`f3dasm` has built-in storing functions for numpy :class:`~numpy.ndarray`, pandas :class:`~pandas.DataFrame` and xarray :class:`~xarray.DataArray` and :class:`~xarray.Dataset`.
Expand Down
16 changes: 8 additions & 8 deletions src/f3dasm/_src/datageneration/abaqus/abaqus_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
Note
----
I'm assuming that every process could have a different pre-processing,
execution and post-processing function.
execution and post-processing function.
But they are used in that order and only once.
If multiple e.g. pre_process steps need to be taken, then they should
be combined in one function.
be combined in one function.
"""

# Modules
Expand Down Expand Up @@ -41,22 +41,22 @@ def pre_process(
----------
experiment_sample : ExperimentSample
The design to run the data generator on. Will be handled by
the pipeline.
the pipeline.
folder_path : str
Path of the folder where the python script is located
python_file : str
Name of the python file to be executed
function_name : str, optional
Name of the function within the python file to be executed,
by default "main"
by default "main"
name : str, optional
Name of the job, by default "job"
remove_temp_files : bool, optional
Note
----
The python file should create an .inp input-file based on the
information of the experiment sample named <job_number>.inp.
information of the experiment sample named <job_number>.inp.
"""

sim_info = kwargs
Expand Down Expand Up @@ -100,14 +100,14 @@ def post_process(
----------
experiment_sample : ExperimentSample
The design to run the data generator on.
Will be handled by the pipeline.
Will be handled by the pipeline.
folder_path : str
Path of the folder where the python script is located
python_file : str
Name of the python file to be executed
function_name : str, optional
Name of the function within the python file to be executed,
by default "main"
by default "main"
name : str, optional
Name of the job, by default "job"
remove_temp_files : bool, optional
Expand All @@ -116,7 +116,7 @@ def post_process(
Note
----
The post-processing python file should write the results of your
simulation to a pickle file
simulation to a pickle file
with the name: results.pkl. This file will be handled by the pipeline.
"""

Expand Down
49 changes: 39 additions & 10 deletions src/f3dasm/_src/datageneration/functions/adapters/augmentor.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# Standard
from abc import ABC, abstractmethod
from copy import copy
from typing import List
from typing import List, Optional

# Third-party
import autograd.numpy as np
Expand Down Expand Up @@ -136,22 +136,25 @@ class FunctionAugmentor:
"""Combination of Augmentors that can change the input and
output of an objective function
Args:
input_augmentors (List[Augmentor]): list of input augmentors
output_augmentors (List[Augmentor]): list of output augmentors
Parameters
----------
input_augmentors : List[Augmentor]
list of input augmentors
output_augmentors : List[Augmentor]
list of output augmentors
"""

def __init__(
self, input_augmentors: List[_Augmentor] = None,
output_augmentors: List[_Augmentor] = None):
self, input_augmentors: Optional[List[_Augmentor]] = None,
output_augmentors: Optional[List[_Augmentor]] = None):
"""Combination of augmentors that can change the input and output of
an objective function
Parameters
----------
input_augmentors, optional
input_augmentors : Optional[List[_Augmentor]]
list of input augmentors, by default None
output_augmentors, optional
output_augmentors : Optional[List[_Augmentor]]
list of output augmentors, by default None
"""
self.input_augmentors = [] if \
Expand Down Expand Up @@ -262,10 +265,36 @@ def augment_reverse_output(self, y: np.ndarray) -> np.ndarray:


def _scale_vector(x: np.ndarray, scale: np.ndarray) -> np.ndarray:
"""Scale a vector x to a given scale"""
"""Scale a vector x to a given scale
Parameters
----------
x : np.ndarray
vector to be scaled
scale : np.ndarray
scale to be scaled towards
Returns
-------
np.ndarray
scaled vector
"""
return (scale[:, 1] - scale[:, 0]) * x + scale[:, 0]


def _descale_vector(x: np.ndarray, scale: np.ndarray) -> np.ndarray:
"""Inverse of the _scale_vector() function"""
"""Inverse of the _scale_vector() function
Parameters
----------
x : np.ndarray
scaled vector
scale : np.ndarray
scale to be scaled towards
Returns
-------
np.ndarray
descaled vector
"""
return (x - scale[:, 0]) / (scale[:, 1] - scale[:, 0])
111 changes: 111 additions & 0 deletions src/f3dasm/_src/experimentdata/_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,59 +42,141 @@
# Storing methods
# =============================================================================


class _Store:
"""Base class for storing and loading output data from disk"""
suffix: int

def __init__(self, object: Any, path: Path):
"""
Protocol class for storing and loading output data from disk
Parameters
----------
object : Any
object to store
path : Path
location to store the object to
"""
self.path = path
self.object = object

def store(self) -> None:
"""
Protocol class for storing objects to disk
Raises
------
NotImplementedError
Raises if the method is not implemented
"""
raise NotImplementedError()

def load(self) -> Any:
"""
Protocol class for loading objects to disk
Returns
-------
Any
The loaded object
Raises
------
NotImplementedError
Raises if the method is not implemented
"""
raise NotImplementedError()


class PickleStore(_Store):
    """Store and load arbitrary Python objects via the pickle protocol."""
    suffix: str = '.pkl'

    def store(self) -> None:
        """Serialize ``self.object`` to disk as a ``.pkl`` file."""
        target = self.path.with_suffix(self.suffix)
        with open(target, 'wb') as fh:
            pickle.dump(self.object, fh)

    def load(self) -> Any:
        """Deserialize the object stored at ``self.path``.

        Returns
        -------
        Any
            The loaded object
        """
        source = self.path.with_suffix(self.suffix)
        with open(source, 'rb') as fh:
            return pickle.load(fh)


class NumpyStore(_Store):
    """Store and load :class:`numpy.ndarray` objects as ``.npy`` files."""
    # Fixed annotation: the suffix is a string, not an int
    suffix: str = '.npy'

    def store(self) -> None:
        """Write ``self.object`` to disk with :func:`numpy.save`."""
        np.save(file=self.path.with_suffix(self.suffix), arr=self.object)

    def load(self) -> np.ndarray:
        """Read the array back from disk with :func:`numpy.load`.

        Returns
        -------
        np.ndarray
            The loaded array
        """
        return np.load(file=self.path.with_suffix(self.suffix))


class PandasStore(_Store):
    """Store and load :class:`pandas.DataFrame` objects as ``.csv`` files."""
    # Fixed annotation: the suffix is a string, not an int
    suffix: str = '.csv'

    def store(self) -> None:
        """Write ``self.object`` to disk with :meth:`pandas.DataFrame.to_csv`."""
        self.object.to_csv(self.path.with_suffix(self.suffix))

    def load(self) -> pd.DataFrame:
        """Read the DataFrame back from disk.

        Returns
        -------
        pd.DataFrame
            The loaded DataFrame

        Note
        ----
        ``to_csv`` writes the index as the first CSV column; reading with
        ``index_col=0`` restores it, so store/load round-trips instead of
        introducing a spurious 'Unnamed: 0' column.
        """
        return pd.read_csv(self.path.with_suffix(self.suffix), index_col=0)


class XarrayStore(_Store):
    """Store and load xarray objects as netCDF (``.nc``) files."""
    # Fixed annotation: the suffix is a string, not an int
    suffix: str = '.nc'

    def store(self) -> None:
        """Write ``self.object`` to disk with ``to_netcdf``."""
        self.object.to_netcdf(self.path.with_suffix(self.suffix))

    def load(self) -> xr.DataArray | xr.Dataset:
        """Read the object back from disk with :func:`xarray.open_dataset`.

        Returns
        -------
        xr.DataArray | xr.Dataset
            The loaded object

        Note
        ----
        ``open_dataset`` always returns a :class:`xarray.Dataset`, so a
        stored :class:`xarray.DataArray` comes back wrapped in a Dataset —
        TODO confirm callers handle this asymmetry.
        """
        return xr.open_dataset(self.path.with_suffix(self.suffix))


Expand All @@ -112,6 +194,35 @@ def load(self) -> xr.DataArray | xr.Dataset:

def load_object(path: Path, experimentdata_directory: Path,
store_method: Type[_Store] = PickleStore) -> Any:
"""
Load an object from disk from a given path and storing method
Parameters
----------
path : Path
path of the object to load
experimentdata_directory : Path
path of the f3dasm project directory
store_method : Type[_Store], optional
storage method protocol, by default PickleStore
Returns
-------
Any
the object loaded from disk
Raises
------
ValueError
Raises if no matching store type is found
Note
----
If no store method is provided, the function will try to find a matching
store type based on the suffix of the item's path. If no matching store
type is found, the function will raise a ValueError. By default, the
function will use the PickleStore protocol to load the object from disk.
"""

_path = experimentdata_directory / path

Expand Down

0 comments on commit 2d03be1

Please sign in to comment.