Ewm7922 defect pixel masks #521

Open

wants to merge 5 commits into base: next
35 changes: 17 additions & 18 deletions src/snapred/backend/dao/ingredients/ReductionIngredients.py
@@ -1,4 +1,4 @@
from typing import List, Optional
from typing import Dict, List, Optional

from pydantic import BaseModel, ConfigDict

@@ -22,14 +22,12 @@ class ReductionIngredients(BaseModel):
useLiteMode: bool
timestamp: float

pixelGroups: List[PixelGroup]
# Changed to dict keyed by group ID
pixelGroups: Dict[int, PixelGroup]
unmaskedPixelGroups: List[PixelGroup]

# these should come from calibration / normalization records
# But will not exist if we proceed without calibration / normalization
# NOTE: These are peaks for normalization, and thus should use the
# Calibrant Sample for the Normalization
detectorPeaksMany: Optional[List[List[GroupPeakList]]] = None
# Peaks now stored in a dict keyed by group ID
detectorPeaksMany: Optional[Dict[int, List[GroupPeakList]]] = None
smoothingParameter: Optional[float]
calibrantSamplePath: Optional[str]
peakIntensityThreshold: Optional[float]
@@ -45,29 +43,30 @@ def preprocess(self) -> PreprocessReductionIngredients:
# At present, `PreprocessReductionIngredients` has no required parameters.
return PreprocessReductionIngredients()

def getDetectorPeaks(self, groupingIndex: int) -> List[GroupPeakList]:
# `groupingIndex` renamed to `groupID`: lookups are now keyed by group ID rather than list position
def getDetectorPeaks(self, groupID: int) -> Optional[List[GroupPeakList]]:
if self.detectorPeaksMany is None:
return None
return self.detectorPeaksMany[groupingIndex]
return self.detectorPeaksMany.get(groupID)

def groupProcessing(self, groupingIndex: int) -> ReductionGroupProcessingIngredients:
return ReductionGroupProcessingIngredients(pixelGroup=self.pixelGroups[groupingIndex])
def groupProcessing(self, groupID: int) -> ReductionGroupProcessingIngredients:
return ReductionGroupProcessingIngredients(pixelGroup=self.pixelGroups[groupID])

def generateFocussedVanadium(self, groupingIndex: int) -> GenerateFocussedVanadiumIngredients:
def generateFocussedVanadium(self, groupID: int) -> GenerateFocussedVanadiumIngredients:
return GenerateFocussedVanadiumIngredients(
smoothingParameter=self.smoothingParameter,
pixelGroup=self.pixelGroups[groupingIndex],
detectorPeaks=self.getDetectorPeaks(groupingIndex),
pixelGroup=self.pixelGroups[groupID],
detectorPeaks=self.getDetectorPeaks(groupID),
artificialNormalizationIngredients=self.artificialNormalizationIngredients,
)

def applyNormalization(self, groupingIndex: int) -> ApplyNormalizationIngredients:
def applyNormalization(self, groupID: int) -> ApplyNormalizationIngredients:
return ApplyNormalizationIngredients(
pixelGroup=self.pixelGroups[groupingIndex],
pixelGroup=self.pixelGroups[groupID],
)

def effectiveInstrument(self, groupingIndex: int) -> EffectiveInstrumentIngredients:
return EffectiveInstrumentIngredients(unmaskedPixelGroup=self.unmaskedPixelGroups[groupingIndex])
def effectiveInstrument(self, groupID: int) -> EffectiveInstrumentIngredients:
return EffectiveInstrumentIngredients(unmaskedPixelGroup=self.unmaskedPixelGroups[groupID])

model_config = ConfigDict(
extra="forbid",
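
For reference, a minimal self-contained sketch of the lookup behavior the dict-keyed fields provide (plain dicts and strings stand in for `PixelGroup` and `GroupPeakList`; the names below are illustrative, not SNAPRed API): a group ID that has been dropped, e.g. because its group was fully masked, simply resolves to `None` instead of shifting the positions of every later entry.

```python
from typing import Dict, List, Optional


def get_detector_peaks(
    peaks_by_group: Optional[Dict[int, List[str]]], group_id: int
) -> Optional[List[str]]:
    # Mirrors the None-safe dict lookup in getDetectorPeaks above.
    if peaks_by_group is None:
        return None
    return peaks_by_group.get(group_id)


peaks = {0: ["peak_a"], 2: ["peak_b"]}  # group 1 already dropped
print(get_detector_peaks(peaks, 2))  # ['peak_b']
print(get_detector_peaks(peaks, 1))  # None rather than an IndexError
```
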
95 changes: 64 additions & 31 deletions src/snapred/backend/recipe/ReductionRecipe.py
@@ -63,8 +63,10 @@ def unbagGroceries(self, groceries: Dict[str, Any]):
self.groceries = groceries.copy()
self.sampleWs = groceries["inputWorkspace"]
self.normalizationWs = groceries.get("normalizationWorkspace", "")
self.maskWs = groceries.get("combinedMask", "")
self.groupingWorkspaces = groceries["groupingWorkspaces"]
self.maskWs = groceries.get("combinedPixelMask", "")
self.groupingWorkspaces = {
groupIndex: workspaceName for groupIndex, workspaceName in enumerate(groceries["groupingWorkspaces"])
}

def _cloneWorkspace(self, inputWorkspace: str, outputWorkspace: str) -> str:
self.mantidSnapper.CloneWorkspace(
@@ -81,7 +83,7 @@ def _cloneIntermediateWorkspace(self, inputWorkspace: str, outputWorkspace: str)
"Cloning workspace...", InputWorkspace=inputWorkspace, OutputWorkspace=outputWorkspace
)
self.mantidSnapper.executeQueue()
return inputWorkspace
return outputWorkspace

def _deleteWorkspace(self, workspace: str):
self.mantidSnapper.DeleteWorkspace(
@@ -155,12 +157,16 @@ def _applyRecipe(self, recipe: Type[Recipe], ingredients_, **kwargs):
recipe().cook(ingredients_, self.groceries)
else:
raise RuntimeError(
(
f"{recipe.__name__} ::"
" Missing non-default input workspace with groceries:"
f" {self.groceries} and kwargs: {kwargs}"
)
f"{recipe.__name__} :: InputWorkspace '{inputWorkspace}' does not exist in Mantid workspace dictionary." # noqa: E501
)
if "groupingWorkspace" in kwargs:
groupingWorkspace = kwargs["groupingWorkspace"]
if not self.mantidSnapper.mtd.doesExist(groupingWorkspace):
raise RuntimeError(
f"{recipe.__name__} :: GroupingWorkspace '{groupingWorkspace}' does not exist in Mantid workspace dictionary." # noqa: E501
)
self.groceries.update(kwargs)
recipe().cook(ingredients_, self.groceries)

def _getNormalizationWorkspaceName(self, groupingIndex: int):
return f"reduced_normalization_{groupingIndex}_{wnvf.formatTimestamp(self.ingredients.timestamp)}"
@@ -174,31 +180,63 @@ def _prepGroupingWorkspaces(self, groupingIndex: int):

groupingName = self.ingredients.pixelGroups[groupingIndex].focusGroup.name.lower()
reducedOutputWs = wng.reductionOutput().runNumber(runNumber).group(groupingName).timestamp(timestamp).build()

sampleClone = self._cloneWorkspace(self.sampleWs, reducedOutputWs)
self.groceries["inputWorkspace"] = sampleClone

normalizationClone = None
if self.normalizationWs:
normalizationClone = self._cloneWorkspace(
self.normalizationWs,
self._getNormalizationWorkspaceName(groupingIndex),
)
self.groceries["normalizationWorkspace"] = normalizationClone
return sampleClone, normalizationClone

def _isGroupFullyMasked(self, groupingWorkspace: str) -> bool:
maskWorkspace = self.mantidSnapper.mtd[self.maskWs]
groupWorkspace = self.mantidSnapper.mtd[groupingWorkspace]

totalMaskedPixels = 0
totalGroupPixels = 0
return sampleClone, normalizationClone

for i in range(groupWorkspace.getNumberHistograms()):
group_spectra = groupWorkspace.readY(i)
for spectrumIndex in group_spectra:
if maskWorkspace.readY(int(spectrumIndex))[0] == 1:
totalMaskedPixels += 1
totalGroupPixels += 1
return totalMaskedPixels == totalGroupPixels
def _checkMaskedPixels(self, groupingWorkspace: str) -> bool:
try:
# Extract the focus group name from the grouping workspace name
focusGroupName = groupingWorkspace.split("__")[1].rsplit("_", 1)[0]
except IndexError:
self.logger().error(f"Unexpected groupingWorkspace format: '{groupingWorkspace}'. Skipping this workspace.")
return True # Skip execution for invalid format

# Retrieve the PixelGroup matching the focus group name
pixelGroup = next(
(
group
for group in self.ingredients.pixelGroups.values()
if group.focusGroup.name.lower() == focusGroupName.lower()
),
None,
)
if not pixelGroup:
self.logger().error(f"No matching PixelGroup found for {groupingWorkspace}")
return True

# Check if all pixels in the group are masked
allMasked = all(param.isMasked for param in pixelGroup.pixelGroupingParameters.values())
if allMasked:
self.logger().warning(
f"All pixels in group '{focusGroupName}' (workspace: '{groupingWorkspace}') are masked. "
"Skipping this group for the reduction.\n"
)
return True
return False

def _removeFullyMaskedGroups(self):
keysRemoved = [
groupIndex
for groupIndex, groupingWs in self.groupingWorkspaces.items()
if self.maskWs and self._checkMaskedPixels(groupingWs)
]

for key in keysRemoved:
self.groupingWorkspaces.pop(key)
self.ingredients.pixelGroups.pop(key)
if self.ingredients.detectorPeaksMany is not None:
self.ingredients.detectorPeaksMany.pop(key)

def queueAlgos(self):
pass
@@ -207,8 +245,8 @@ def execute(self):
data: Dict[str, Any] = {"result": False}

# Retain unfocused data for comparison.
if self.keepUnfocused:
data["unfocusedWS"] = self._prepareUnfocusedData(self.sampleWs, self.maskWs, self.convertUnitsTo)
if self.maskWs:
self._removeFullyMaskedGroups()

# 1. PreprocessReductionRecipe
outputs = []
Expand All @@ -227,16 +265,11 @@ def execute(self):
)
self._cloneIntermediateWorkspace(self.normalizationWs, "normalization_preprocessed")

for groupingIndex, groupingWs in enumerate(self.groupingWorkspaces):
for groupingIndex, groupingWs in self.groupingWorkspaces.items():
self.groceries["groupingWorkspace"] = groupingWs

if self.maskWs and self._isGroupFullyMasked(groupingWs):
if self.maskWs and self._checkMaskedPixels(groupingWs):
# Notify the user of a fully masked group, but continue with the workflow
self.logger().warning(
f"\nAll pixels masked within {groupingWs} schema.\n"
+ "Skipping all algorithm execution for this group.\n"
+ "This will affect future reductions."
)
continue

sampleClone, normalizationClone = self._prepGroupingWorkspaces(groupingIndex)
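
A standalone sketch of the bookkeeping behind `_removeFullyMaskedGroups`, with hypothetical names and no Mantid dependency: the same key is dropped from every parallel dict so that the later `for groupingIndex, groupingWs in self.groupingWorkspaces.items()` loop never sees a group whose ingredients are gone. Collecting the keys first and popping afterwards also avoids mutating a dict while iterating over it.

```python
from typing import Callable, Dict, List, Optional


def remove_fully_masked_groups(
    grouping_workspaces: Dict[int, str],
    pixel_groups: Dict[int, object],
    detector_peaks: Optional[Dict[int, list]],
    is_fully_masked: Callable[[str], bool],
) -> List[int]:
    # Collect the keys to drop, then remove them from every parallel dict.
    removed = [key for key, ws in grouping_workspaces.items() if is_fully_masked(ws)]
    for key in removed:
        grouping_workspaces.pop(key)
        pixel_groups.pop(key, None)
        if detector_peaks is not None:
            detector_peaks.pop(key, None)
    return removed


workspaces = {0: "grp__column_lite", 1: "grp__bank_lite"}
groups = {0: object(), 1: object()}
peaks = {0: [], 1: []}
print(remove_fully_masked_groups(workspaces, groups, peaks, lambda ws: ws.endswith("bank_lite")))  # [1]
print(workspaces)  # {0: 'grp__column_lite'}
```
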
3 changes: 2 additions & 1 deletion src/snapred/backend/service/ReductionService.py
@@ -215,7 +215,8 @@ def _createReductionRecord(
calibration=calibration,
normalization=normalization,
pixelGroupingParameters={
pg.focusGroup.name: [pg[gid] for gid in pg.groupIDs] for pg in ingredients.pixelGroups
pg.focusGroup.name: [pg.pixelGroupingParameters[gid] for gid in pg.groupIDs]
for pg in ingredients.pixelGroups.values()
},
workspaceNames=workspaceNames,
)
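
An illustrative-only version of the `pixelGroupingParameters` comprehension above, with lightweight stand-ins for `PixelGroup` and `FocusGroup` (the real classes carry more fields); it shows why the comprehension iterates `pixelGroups.values()` now that the field is a dict keyed by group index.

```python
from dataclasses import dataclass, field
from typing import Dict, List


@dataclass
class FocusGroup:
    name: str


@dataclass
class PixelGroup:
    focusGroup: FocusGroup
    groupIDs: List[int]
    pixelGroupingParameters: Dict[int, str] = field(default_factory=dict)


pixelGroups = {
    0: PixelGroup(FocusGroup("Column"), [3, 7], {3: "params_3", 7: "params_7"}),
    1: PixelGroup(FocusGroup("Bank"), [11], {11: "params_11"}),
}

# Same shape as the comprehension in _createReductionRecord: iterate the dict's
# values and key the record by focus-group name.
record = {
    pg.focusGroup.name: [pg.pixelGroupingParameters[gid] for gid in pg.groupIDs]
    for pg in pixelGroups.values()
}
print(record)  # {'Column': ['params_3', 'params_7'], 'Bank': ['params_11']}
```
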
23 changes: 19 additions & 4 deletions src/snapred/backend/service/SousChef.py
@@ -267,21 +267,36 @@ def prepReductionIngredients(
self, ingredients: FarmFreshIngredients, combinedPixelMask: Optional[WorkspaceName] = None
) -> ReductionIngredients:
ingredients_ = ingredients.model_copy()
# some of the reduction ingredients MUST match those used in the calibration/normalization processes

# Some of the reduction ingredients MUST match those used in the calibration/normalization processes
ingredients_ = self._pullCalibrationRecordFFI(ingredients_)
ingredients_, smoothingParameter, calibrantSamplePath = self._pullNormalizationRecordFFI(ingredients_)
ingredients_.calibrantSamplePath = calibrantSamplePath

# Convert pixelGroups to a dictionary keyed by group ID
pixelGroups_dict = {
i: group for i, group in enumerate(self.prepManyPixelGroups(ingredients_, combinedPixelMask))
}
unmaskedPixelGroups_dict = {i: group for i, group in enumerate(self.prepManyPixelGroups(ingredients_))}

# Convert detectorPeaksMany to a dictionary if it exists
detectorPeaksMany_dict = (
{i: peaks for i, peaks in enumerate(self.prepManyDetectorPeaks(ingredients_))}
if self.prepManyDetectorPeaks(ingredients_)
else None
)

# Create the ReductionIngredients object
return ReductionIngredients(
runNumber=ingredients_.runNumber,
useLiteMode=ingredients_.useLiteMode,
timestamp=ingredients_.timestamp,
pixelGroups=self.prepManyPixelGroups(ingredients_, combinedPixelMask),
unmaskedPixelGroups=self.prepManyPixelGroups(ingredients_),
pixelGroups=pixelGroups_dict,
unmaskedPixelGroups=list(unmaskedPixelGroups_dict.values()),
smoothingParameter=smoothingParameter,
calibrantSamplePath=ingredients_.calibrantSamplePath,
peakIntensityThreshold=self._getThresholdFromCalibrantSample(ingredients_.calibrantSamplePath),
detectorPeaksMany=self.prepManyDetectorPeaks(ingredients_),
detectorPeaksMany=detectorPeaksMany_dict,
keepUnfocused=ingredients_.keepUnfocused,
convertUnitsTo=ingredients_.convertUnitsTo,
)
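
A small sketch of the list-to-dict conversion pattern used in `prepReductionIngredients` (hypothetical helper names; `enumerate` keys each entry by its original list index). As written in the diff, `prepManyDetectorPeaks` is invoked twice; the sketch assumes the producer is called once and its result reused.

```python
from typing import Dict, List, Optional


def to_indexed_dict(items: List[str]) -> Dict[int, str]:
    # enumerate keys each entry by its original list position.
    return {i: item for i, item in enumerate(items)}


def to_indexed_dict_or_none(items: List[str]) -> Optional[Dict[int, str]]:
    # Compute the list once and reuse it, instead of calling the producer twice.
    return to_indexed_dict(items) if items else None


print(to_indexed_dict(["columnGroup", "bankGroup"]))  # {0: 'columnGroup', 1: 'bankGroup'}
print(to_indexed_dict_or_none([]))  # None
```
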
2 changes: 1 addition & 1 deletion tests/data/snapred-data
Submodule snapred-data updated from 59443a to 819167