Merge pull request #5 from ornlneutronimaging/4_multiple_set
Fixed work. This refs #4.
KedoKudo authored Mar 10, 2022
2 parents fef696c + 80456d3 commit 4805cdb
Showing 3 changed files with 22 additions and 4 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -356,3 +356,6 @@ $RECYCLE.BIN/

 # End of https://www.toptal.com/developers/gitignore/api/macos,linux,windows,pycharm,vscode,vim,python,jupyternotebooks
 .vscode/settings.json
+
+# pycharm
+.idea
13 changes: 10 additions & 3 deletions neutronimaging/detector_correction.py
@@ -68,8 +68,12 @@ def skipping_meta_data(meta: pd.DataFrame) -> pd.DataFrame:
     return pd.concat(_with_skips)


-def load_images(raw_imamge_dir: str) -> Type[Normalization]:
-    """Loading all Images into memory"""
+def load_images(raw_imamge_dir: str, nbr_of_duplicated_runs: int = 1) -> Type[Normalization]:
+    """Load all images into memory.
+    If nbr_of_duplicated_runs is greater than 1 (the default is 1), the MCP mistakenly
+    produced extra sets of the same data; those must not be used in the reconstruction.
+    """
+    import glob
     from neutronimaging.util import in_jupyter

@@ -81,6 +85,9 @@
     ]
     _img_names.sort()

+    final_index = int(len(_img_names) / nbr_of_duplicated_runs)
+    _img_names = _img_names[0:final_index]
+
     o_norm.load(file=_img_names, notebook=in_jupyter())
     return o_norm

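For illustration, a minimal standalone sketch of the slice above, assuming two duplicated sets whose (hypothetical) file-name prefixes make the first set sort ahead of the second:

# Sketch only: file names are hypothetical, chosen so sorting groups set 1 first.
set_one = [f"Run_1000_{i:05d}.fits" for i in range(5)]
set_two = [f"Run_1001_{i:05d}.fits" for i in range(5)]  # duplicate set written by mistake
_img_names = sorted(set_one + set_two)

nbr_of_duplicated_runs = 2
final_index = int(len(_img_names) / nbr_of_duplicated_runs)
_img_names = _img_names[0:final_index]

assert _img_names == set_one  # only the first complete set survives

Note that the slice relies on every duplicate set sorting entirely after the first one; if the sets interleaved when sorted, it would mix images from different sets.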
@@ -131,7 +138,7 @@ def correct_images(
     import os

     _file_root = os.path.dirname(os.path.abspath(__file__))
-    test_data_dir = os.path.join(_file_root, "../data")
+    test_data_dir = os.path.join(_file_root, "../../../../NeutronImagingScripts/data")
     #
     shutter_counts_file = os.path.join(test_data_dir, "OB_1_005_ShutterCount.txt")
     df_shutter_count = read_shutter_count(shutter_counts_file)
10 changes: 9 additions & 1 deletion scripts/mcp_detector_correction.py
@@ -40,6 +40,14 @@
 output_dir = args["<output_dir>"]
 skip_first_last_img = args["--skipimg"]
 verbose = args["--verbose"]
+
+# In some rare instances, the MCP creates a duplicate set of the run in the
+# same folder; we only need to consider the first set in the autoreduction.
+shutter_count_files = glob.glob(input_dir + "/*_ShutterCount.txt")
+nbr_of_duplicated_runs = len(shutter_count_files)
+if nbr_of_duplicated_runs > 1:
+    print(f"The folder contains {nbr_of_duplicated_runs} sets of the same data!")
+
 shutter_count_file = glob.glob(input_dir + "/*_ShutterCount.txt")[0]
 shutter_time_file = glob.glob(input_dir + "/*_ShutterTimes.txt")[0]
 spectra_file = glob.glob(input_dir + "/*_Spectra.txt")[0]
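A self-contained sketch of this detection rule (each set the MCP writes is assumed to carry its own *_ShutterCount.txt, which is what gets counted; the directory and file names below are hypothetical):

# Build a throwaway folder that mimics a run duplicated by the MCP.
import glob
import os
import tempfile

input_dir = tempfile.mkdtemp()
for prefix in ("Run_1000", "Run_1001"):  # second set written by mistake
    for suffix in ("ShutterCount", "ShutterTimes", "Spectra"):
        open(os.path.join(input_dir, f"{prefix}_{suffix}.txt"), "w").close()

shutter_count_files = glob.glob(input_dir + "/*_ShutterCount.txt")
nbr_of_duplicated_runs = len(shutter_count_files)
if nbr_of_duplicated_runs > 1:
    print(f"The folder contains {nbr_of_duplicated_runs} sets of the same data!")
# prints: The folder contains 2 sets of the same data!

The resulting count is passed straight to load_images below, which keeps only the first 1/nbr_of_duplicated_runs of the sorted image names.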
@@ -64,7 +72,7 @@

 # load images
 print("Loading images into memory")
-o_norm = load_images(input_dir)
+o_norm = load_images(input_dir, nbr_of_duplicated_runs)

 # perform image correction
 print("Perform correction")
