docker minus credentials
tathey1 committed May 11, 2023
1 parent 2d23e63 commit 165f52a
Showing 4 changed files with 71 additions and 53 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
@@ -19,7 +19,7 @@ RUN pip install -e .
RUN chmod +x ./.aws.sh
RUN ./.aws.sh


CMD python experiments/BrainLine/scripts/soma_detect_image.py

# Old version
# FROM python:3.8-slim
@@ -28,7 +28,7 @@ RUN ./.aws.sh
# WORKDIR /usr/src/app

# #RUN apt-get update \
-# # && apt-get install -y --no-install-recommends git \
+# # && apt-get install -y --no-install-recommends git
# # && apt-get purge -y --auto-remove \
# RUN apt-get update
# RUN apt-get install -y --no-install-recommends \
115 changes: 68 additions & 47 deletions brainlit/BrainLine/apply_ilastik.py
@@ -495,8 +495,9 @@ def apply_ilastik_parallel(
        corners_chunks = [corners[i : i + 100] for i in range(0, len(corners), 100)]

        for corners_chunk in tqdm(corners_chunks, desc="corner chunks"):
-           Parallel(n_jobs=self.ncpu)(
-               delayed(self._process_chunk)(
+           if self.ncpu == 1:
+               for corner in tqdm(corners_chunk, leave=False):
+                   self._process_chunk(
                        corner[0],
                        corner[1],
                        volume_base_dir,
@@ -506,8 +507,20 @@
                        self.object_type,
                        results_dir,
                    )
-               for corner in tqdm(corners_chunk, leave=False)
-           )
+           else:
+               Parallel(n_jobs=self.ncpu)(
+                   delayed(self._process_chunk)(
+                       corner[0],
+                       corner[1],
+                       volume_base_dir,
+                       layer_names,
+                       threshold,
+                       data_dir,
+                       self.object_type,
+                       results_dir,
+                   )
+                   for corner in tqdm(corners_chunk, leave=False)
+               )
            for f in os.listdir(data_dir):
                os.remove(os.path.join(data_dir, f))

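Aside: the change above routes single-CPU runs through a plain in-process loop instead of joblib's process pool, which keeps tracebacks readable and avoids worker/pickling overhead. A minimal, self-contained sketch of the same dispatch pattern, where square is a hypothetical stand-in for self._process_chunk:

from joblib import Parallel, delayed
from tqdm import tqdm

def square(x):
    # Hypothetical stand-in for self._process_chunk.
    return x * x

def run_chunk(items, ncpu):
    if ncpu == 1:
        # Serial path: runs in-process, so breakpoints and tracebacks work normally.
        return [square(i) for i in tqdm(items, leave=False)]
    # Parallel path: joblib fans the same calls out across ncpu worker processes.
    return Parallel(n_jobs=ncpu)(delayed(square)(i) for i in tqdm(items, leave=False))

print(run_chunk(range(5), ncpu=1))  # [0, 1, 4, 9, 16]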
@@ -566,50 +579,58 @@ def _process_chunk(
fname = f"image_{c1[0]}_{c1[1]}_{c1[2]}.h5"
fname = data_dir / fname

with h5py.File(fname, "w") as f:
dset = f.create_dataset("image_3channel", data=image_3channel)

subprocess.run(
[
f"{self.ilastik_path}",
"--headless",
f"--project={self.ilastik_project}",
fname,
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# subprocess.run(["/Applications/ilastik-1.3.3post3-OSX.app/Contents/ilastik-release/run_ilastik.sh", "--headless", "--project=/Users/thomasathey/Documents/mimlab/mouselight/ailey/benchmark_formal/brain3/matt_benchmark_formal_brain3.ilp", fname], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

fname_prob = str(fname).split(".")[0] + "_Probabilities.h5"
with h5py.File(fname_prob, "r") as f:
pred = f.get("exported_data")
if object_type == "soma":
fname_results = f"image_{c1[0]}_{c1[1]}_{c1[2]}_somas.txt"
fname_results = results_dir / fname_results
pred = pred[0, :, :, :]
mask = pred > threshold
labels = measure.label(mask)
props = measure.regionprops(labels)
for attempt in range(3):
with h5py.File(fname, "w") as f:
dset = f.create_dataset("image_3channel", data=image_3channel)

results = []
for prop in props:
if prop["area"] > area_threshold:
location = list(np.add(c1, prop["centroid"]))
results.append(location)
if len(results) > 0:
with open(fname_results, "w") as f2:
for location in results:
f2.write(str(location))
f2.write("\n")
elif object_type == "axon":
dir_mask = volume_base_dir + "axon_mask"
vol_mask = CloudVolume(
dir_mask, parallel=1, mip=mip, fill_missing=True, compress=False
)
pred = pred[1, :, :, :]
mask = np.array(pred > threshold).astype("uint64")
vol_mask[c1[0] : c2[0], c1[1] : c2[1], c1[2] : c2[2]] = mask
subprocess.run(
[
f"{self.ilastik_path}",
"--headless",
f"--project={self.ilastik_project}",
fname,
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)

fname_prob = str(fname).split(".")[0] + "_Probabilities.h5"
try:
with h5py.File(fname_prob, "r") as f:
pred = f.get("exported_data")
except:
if attempt >= 2:
raise ValueError(f"Tried to evaluate thrice and failed")
if os.path.isfile(fname_prob):
os.remove(fname_prob)
continue

if object_type == "soma":
fname_results = f"image_{c1[0]}_{c1[1]}_{c1[2]}_somas.txt"
fname_results = results_dir / fname_results
pred = pred[0, :, :, :]
mask = pred > threshold
labels = measure.label(mask)
props = measure.regionprops(labels)

results = []
for prop in props:
if prop["area"] > area_threshold:
location = list(np.add(c1, prop["centroid"]))
results.append(location)
if len(results) > 0:
with open(fname_results, "w") as f2:
for location in results:
f2.write(str(location))
f2.write("\n")
elif object_type == "axon":
dir_mask = volume_base_dir + "axon_mask"
vol_mask = CloudVolume(
dir_mask, parallel=1, mip=mip, fill_missing=True, compress=False
)
pred = pred[1, :, :, :]
mask = np.array(pred > threshold).astype("uint64")
vol_mask[c1[0] : c2[0], c1[1] : c2[1], c1[2] : c2[2]] = mask

def collect_soma_results(self, brain_id: str):
"""Combine all soma detections and post to neuroglancer. Intended for use after apply_ilastik_parallel.
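Aside: the new loop re-runs ilastik up to three times, deleting a possibly corrupt probabilities file between attempts before giving up. The same retry-with-cleanup pattern as a minimal sketch, with hypothetical run_once and read_result callables standing in for the ilastik subprocess and the h5py read:

import os

def run_with_retries(run_once, read_result, output_path, attempts=3):
    # Re-run a flaky external step until its output can be read.
    for attempt in range(attempts):
        run_once()  # e.g. the headless ilastik subprocess
        try:
            return read_result()  # e.g. open output_path with h5py
        except Exception:
            if attempt >= attempts - 1:
                raise ValueError(f"Failed after {attempts} attempts")
            if os.path.isfile(output_path):
                os.remove(output_path)  # clear corrupt output before retrying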
3 changes: 0 additions & 3 deletions experiments/BrainLine/scripts/soma_detect_image.py
@@ -37,9 +37,6 @@
# Ilastik will run in "headless mode", and the following paths are needed to do so:
ilastik_path = "/brainlit_dir/ilastik-1.4.0-Linux/run_ilastik.sh" # "/Applications/ilastik-1.4.0b21-OSX.app/Contents/ilastik-release/run_ilastik.sh" # "/data/tathey1/matt_wright/ilastik/ilastik-1.4.0rc5-Linux/run_ilastik.sh" # path to ilastik executable
ilastik_project = "/brainlit_dir/experiments/BrainLine/data/models/soma/matt_soma_rabies_pix_3ch.ilp" # "/Users/thomasathey/Documents/mimlab/mouselight/ailey/detection_soma/matt_soma_rabies_pix_3ch.ilp" # "/data/tathey1/matt_wright/ilastik/soma_model/matt_soma_rabies_pix_3ch.ilp" # path to ilastik project
ilastik_path = "/Applications/ilastik-1.4.0b21-OSX.app/Contents/ilastik-release/run_ilastik.sh" # "/data/tathey1/matt_wright/ilastik/ilastik-1.4.0rc5-Linux/run_ilastik.sh" # path to ilastik executable
ilastik_project = "/Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/BrainLine/data/models/soma/matt_soma_rabies_pix_3ch.ilp" # "/data/tathey1/matt_wright/ilastik/soma_model/matt_soma_rabies_pix_3ch.ilp" # path to ilastik project



min_coords = [544, 1660, -1]
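Aside: the surviving ilastik_path and ilastik_project values above feed the same headless invocation that apply_ilastik.py makes. A minimal sketch of that call, assuming the paths exist and image_0_0_0.h5 is a hypothetical input volume:

import subprocess

ilastik_path = "/brainlit_dir/ilastik-1.4.0-Linux/run_ilastik.sh"
ilastik_project = "/brainlit_dir/experiments/BrainLine/data/models/soma/matt_soma_rabies_pix_3ch.ilp"
fname = "image_0_0_0.h5"  # hypothetical input written beforehand with h5py

# Run ilastik pixel classification without the GUI; predictions are written
# to image_0_0_0_Probabilities.h5 alongside the input.
subprocess.run(
    [ilastik_path, "--headless", f"--project={ilastik_project}", fname],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)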
2 changes: 1 addition & 1 deletion experiments/BrainLine/soma_analysis.ipynb
@@ -704,7 +704,7 @@
"metadata": {},
"source": [
"```\n",
"python -m cloudreg.scripts.registration -input_s3_path precomputed://s3://smartspim-precomputed-volumes/2023_04_12/MS37/Ch_561 --output_s3_path precomputed://s3://smartspim-precomputed-volumes/2023_04_12/MS37/atlas_to_target --atlas_s3_path https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_50um/average_50um --parcellation_s3_path https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_10um/annotation_10um_2017 --atlas_orientation PIR -orientation RAI --rotation 0 0 0 --translation 0 0 0 --fixed_scale 1.07 -log_s3_path precomputed://s3://smartspim-precomputed-volumes/2023_04_12/MS37/atlas_to_target --missing_data_correction True --grid_correction False --bias_correction True --regularization 5000.0 --iterations 3000 --registration_resolution 100\n",
"python -m cloudreg.scripts.registration -input_s3_path precomputed://s3://smartspim-precomputed-volumes/2023_04_14/MS25/Ch_561 --output_s3_path precomputed://s3://smartspim-precomputed-volumes/2023_04_14/MS25/atlas_to_target --atlas_s3_path https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_50um/average_50um --parcellation_s3_path https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_10um/annotation_10um_2017 --atlas_orientation PIR -orientation RPI --rotation 0 0 0 --translation 0 0 0 --fixed_scale 1. -log_s3_path precomputed://s3://smartspim-precomputed-volumes/2023_04_14/MS25/atlas_to_target --missing_data_correction True --grid_correction False --bias_correction True --regularization 5000.0 --iterations 3000 --registration_resolution 100\n",
"```"
]
},
