version 0.3.3rc1 (#127)

* remove boost version constraint in environment.yml

* sets ref_effector as optional in model_factory

* fixes #120

* fixes uncaught bug in find_IHs

* refactored solver 

* new topo_changed attribute

* Bug fixes with solver decorators, history refactoring

* reintroduced segment assignment after IH
glyg authored Feb 7, 2019
1 parent ce34b9a commit 51840ab
Showing 15 changed files with 2,186 additions and 2,112 deletions.
3,997 changes: 1,989 additions & 2,008 deletions doc/bibliography/tyssue.bib

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion environment.yml
@@ -3,7 +3,7 @@ channels:
- conda-forge

dependencies:
- boost-cpp 1.68
- boost-cpp
- mpfr
- python>=3.6
- numpy
2 changes: 1 addition & 1 deletion setup.py
@@ -29,7 +29,7 @@
## Thanks to them!
MAJOR = 0
MINOR = 3
MICRO = 2
MICRO = "3rc1"
ISRELEASED = True
VERSION = "%d.%d.%s" % (MAJOR, MINOR, MICRO)

8 changes: 5 additions & 3 deletions tests/topology/test_bulk_topology.py
@@ -16,7 +16,7 @@ def test_IH_transition():
sheet.sanitize()
datasets = extrude(sheet.datasets, method="translation")

eptm = Epithelium("test_IHt", datasets, bulk_spec())
eptm = Monolayer("test_IHt", datasets, bulk_spec())
BulkGeometry.update_all(eptm)
Nc, Nf, Ne, Nv = eptm.Nc, eptm.Nf, eptm.Ne, eptm.Nv
eptm.settings["threshold_length"] = 1e-3
@@ -30,7 +30,9 @@ def test_IH_transition():
invalid = eptm.get_invalid()
assert np.alltrue(1 - invalid)
assert np.alltrue(eptm.edge_df["sub_vol"] > 0)
assert len(eptm.face_df[eptm.face_df.segment == "apical"]) == len(eptm.cell_df)
assert (
eptm.face_df[eptm.face_df.segment == "apical"].shape[0] == eptm.cell_df.shape[0]
)


def test_HI_transition():
@@ -39,7 +41,7 @@ def test_HI_transition():
sheet.sanitize()
datasets = extrude(sheet.datasets, method="translation")

eptm = Epithelium("test_HIt", datasets, bulk_spec())
eptm = Monolayer("test_HIt", datasets, bulk_spec())
BulkGeometry.update_all(eptm)
Nc, Nf, Ne, Nv = eptm.Nc, eptm.Nf, eptm.Ne, eptm.Nv
eptm.settings["threshold_length"] = 1e-3
2 changes: 1 addition & 1 deletion tyssue/collisions/__init__.py
@@ -1,2 +1,2 @@
from .intersection import self_intersections
from .solvers import solve_collisions, CollidingBoxes
from .solvers import auto_collisions, CollidingBoxes
16 changes: 11 additions & 5 deletions tyssue/collisions/solvers.py
@@ -10,7 +10,7 @@
log = logging.getLogger(__name__)


def solve_collisions(fun):
def auto_collisions(fun):
"""Decorator to solve collisions detections after the
execution of the decorated function.
@@ -24,6 +24,7 @@ def solve_collisions(fun):

@wraps(fun)
def with_collision_correction(*args, **kwargs):
log.debug("checking for collisions")
eptm, geom = args[:2]
position_buffer = eptm.vert_df[eptm.coords].copy()
res = fun(*args, **kwargs)
@@ -47,8 +48,10 @@ def solve_bulk_collisions(eptm, position_buffer):
index=sub_sheet.vert_df.index,
columns=sub_sheet.coords,
)
solve_sheet_collisions(sub_sheet, sub_buffer)
eptm.vert_df.loc[pos_idx, eptm.coords] = sub_sheet.vert_df[eptm.coords].values
changed = solve_sheet_collisions(sub_sheet, sub_buffer)
if changed:
eptm.vert_df.loc[pos_idx, eptm.coords] = sub_sheet.vert_df[eptm.coords].values
return changed


def solve_sheet_collisions(sheet, position_buffer):
@@ -58,7 +61,9 @@ def solve_sheet_collisions(sheet, position_buffer):
log.info("%d intersections were detected", intersecting_edges.shape[0])
shyness = sheet.settings.get("shyness", 1e-10)
boxes = CollidingBoxes(sheet, position_buffer, intersecting_edges)
boxes.solve_collisions(shyness)
success = boxes.solve_collisions(shyness)
return success
return False


class CollidingBoxes:
@@ -147,7 +152,7 @@ def solve_collisions(self, shyness=1e-10):
plane_found = False

if not plane_found:
return 0
return False

upper_bounds.index.name = "vert"
upper_bounds = (
@@ -174,6 +179,7 @@ def solve_collisions(self, shyness=1e-10):
self.sheet.vert_df.loc[
corrections.index.values, self.sheet.coords
] = corrections
return True

def _collision_plane(self, face_pair, shyness):

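The renamed decorator keeps the same contract as before: it wraps any function whose first two positional arguments are the epithelium and its geometry class, buffers the vertex positions before the call, and resolves any self-intersections the call introduced. A minimal sketch of how it might be applied; the relax function and its body are illustrative, not part of this commit:

from tyssue.collisions.solvers import auto_collisions

@auto_collisions
def relax(eptm, geom, new_positions):
    # hypothetical vertex update; the decorator only requires that the
    # first two arguments are the epithelium and its geometry class
    eptm.vert_df.loc[:, eptm.coords] = new_positions
    geom.update_all(eptm)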
73 changes: 48 additions & 25 deletions tyssue/core/history.py
@@ -41,38 +41,47 @@ def __init__(self, sheet, extra_cols=None):
extra_cols = defaultdict(list, **extra_cols)

self.sheet = sheet
self.t = 0
self.time_id = 0

self.datasets = {}
self.columns = {}
vcols = sheet.coords + extra_cols["vert"]
self.vcols = _filter_columns(vcols, sheet.vert_df.columns, "vertex")
self.vert_h = sheet.vert_df[self.vcols].reset_index(drop=False)
if not "t" in self.vcols:
self.vert_h["t"] = 0

ecols = ["srce", "trgt", "face"] + extra_cols["edge"]
self.ecols = _filter_columns(ecols, sheet.edge_df.columns, "edge")
self.edge_h = sheet.edge_df[self.ecols].reset_index(drop=False)
if not "t" in self.ecols:
self.edge_h["t"] = 0
if not "time_id" in self.vcols:
self.vert_h["time_id"] = 0
self.datasets["vert"] = self.vert_h
self.columns["vert"] = self.vcols

fcols = extra_cols["face"]
self.fcols = _filter_columns(fcols, sheet.face_df.columns, "face")
self.face_h = sheet.face_df[self.fcols].reset_index(drop=False)
if not "t" in self.fcols:
self.face_h["t"] = 0
self.datasets = {"vert": self.vert_h, "edge": self.edge_h, "face": self.face_h}
self.columns = {"vert": self.vcols, "edge": self.ecols, "face": self.fcols}
if not "time_id" in self.fcols:
self.face_h["time_id"] = 0
self.datasets["face"] = self.face_h
self.columns["face"] = self.fcols

if sheet.cell_df is not None:
ccols = extra_cols["cell"]
self.ccols = _filter_columns(ccols, sheet.cell_df.columns, "cell")
self.cell_h = sheet.cell_df[self.ccols].reset_index(drop=False)
if not "t" in self.ccols:
self.cell_h["t"] = 0
if not "time_id" in self.ccols:
self.cell_h["time_id"] = 0
self.datasets["cell"] = self.cell_h
self.columns["cell"] = self.ccols
extra_cols["edge"].append("cell")

def record(self, to_record=["vert"]):
ecols = ["srce", "trgt", "face"] + extra_cols["edge"]
self.ecols = _filter_columns(ecols, sheet.edge_df.columns, "edge")
self.edge_h = sheet.edge_df[self.ecols].reset_index(drop=False)
if not "time_id" in self.ecols:
self.edge_h["time_id"] = 0
self.datasets["edge"] = self.edge_h
self.columns["edge"] = self.ecols
self.time_index = []
self.time_stamps = []

def record(self, to_record=["vert"], time_stamp=None):
"""Appends a copy of the sheet datasets to the history instance.
Parameters
@@ -81,19 +90,25 @@ def record(self, to_record=["vert"]):
the datasets from self.sheet to be saved
"""
self.time_id += 1
self.time_index.append(self.time_id)

if time_stamp is not None:
self.time_stamps.append(time_stamp)

self.t += 1
for element in to_record:
hist = self.datasets[element]
cols = self.columns[element]
df = self.sheet.datasets[element][cols].reset_index(drop=False)
if not "t" in cols:
times = pd.Series(np.ones((df.shape[0],), dtype=int) * self.t, name="t")
if not "time_id" in cols:
times = pd.Series(
np.ones((df.shape[0],)) * self.time_id, name="time_id"
)
df = pd.concat([df, times], ignore_index=False, axis=1, sort=False)
hist = pd.concat([hist, df], ignore_index=True, axis=0, sort=False)
self.datasets[element] = hist

def retrieve(self, time):
def retrieve(self, index):
"""Return datasets at time `time`.
If a specific dataset was not recorded at time time, the closest record before that
@@ -103,14 +118,22 @@ def retrieve(self, time):
for element in self.datasets:
hist = self.datasets[element]
cols = self.columns[element]
df = _retrieve(hist, time)
df = _retrieve(hist, index)
df = df.set_index(element)[cols]
sheet_datasets[element] = df

return sheet_datasets

def __iter__(self):

for t in self.time_index:
sheet = type(self.sheet)(
f"{self.sheet.identifier}_{t:04d}", self.retrieve(t), self.sheet.specs
)
yield t, sheet


def _retrieve(dset, time):
times = dset["t"].values
t = times[times <= time][-1]
return dset[dset["t"] == t]
def _retrieve(dset, time_id):
times = dset["time_id"].values
t = times[times <= time_id][-1]
return dset[dset["time_id"] == t]
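The refactored History replaces the per-element "t" column with an integer time_id, optionally stores wall-clock time_stamps, and is now iterable, yielding (time_id, sheet) pairs rebuilt from the recorded data. A minimal usage sketch, assuming the standard construction helpers below behave as in this version; the recording loop is illustrative:

from tyssue import Sheet
from tyssue.core.history import History
from tyssue.generation import three_faces_sheet

datasets, specs = three_faces_sheet()
sheet = Sheet("demo", datasets, specs)
history = History(sheet)

for step in range(3):
    # vertex positions would normally be updated here
    history.record(["vert"], time_stamp=0.1 * step)

# datasets at the latest record; earlier records are found through the
# closest time_id smaller than or equal to the requested index
latest = history.retrieve(history.time_id)
print(latest["vert"].head())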
41 changes: 41 additions & 0 deletions tyssue/core/monolayer.py
@@ -1,3 +1,7 @@
"""Monolayer epithelium objects
"""

import logging
import numpy as np
import pandas as pd

@@ -7,6 +11,8 @@
from ..generation import extrude, subdivide_faces
from ..geometry.bulk_geometry import BulkGeometry

logger = logging.getLogger(name=__name__)


class Monolayer(Epithelium):
"""
@@ -19,6 +25,7 @@ def __init__(self, name, datasets, specs=None, coords=None):
self.vert_df["is_active"] = 1
self.cell_df["is_alive"] = 1
self.face_df["is_alive"] = 1
self.reset_topo()
BulkGeometry.update_all(self)

@classmethod
@@ -87,6 +94,40 @@ def get_sub_sheet(self, segment):
specs = {k: self.specs[k] for k in ["face", "edge", "vert", "settings"]}
return Sheet(self.identifier + segment, datasets, specs)

def guess_vert_segment(self, vert):
"""Infers the vertex segment from its surrounding edges.
"""
v_edges = self.edge_df[self.edge_df["srce"] == vert]
if v_edges.shape[0] == 0:
logger.info("Vertex %d not found", vert)
return
if v_edges.shape[0] == 12:
self.vert_df.loc[vert, ["segment"]] = "lateral"
return
intersect = {"apical", "basal"}.intersection(v_edges["segment"])
if len(intersect) == 2:
logger.info("Segment of vertex %d could not be determined", vert)
self.vert_df.loc[vert, ["segment"]] = "unknown"
elif not intersect:
self.vert_df.loc[vert, ["segment"]] = "lateral"
else: # intersect is {"apical"} or {"basal"}
self.vert_df.loc[vert, ["segment"]], = intersect

def guess_face_segment(self, face):
"""Infers the face segment from its surrounding edges.
"""
face_edges = self.edge_df[self.edge_df.face == face]
if face_edges.shape[0] == 0:
logger.info("face %d not found", face)
v_segments = set(self.vert_df.loc[face_edges["srce"], "segment"])
if len(v_segments) == 2:
self.face_df.loc[face, "segment"] = "lateral"
elif len(v_segments) == 1:
new_segment, = v_segments
self.face_df.loc[face, "segment"] = new_segment


class MonolayerWithLamina(Monolayer):
"""
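The two guessing helpers re-derive the segment of a vertex or face from its neighbourhood, which is what the "reintroduced segment assignment after IH" item in the commit message refers to. A hedged sketch following the same construction pattern as the updated tests; the grid sizes and the ids passed to the helpers are arbitrary, and bulk_spec is assumed to be importable from tyssue.config.geometry:

from tyssue import Monolayer, Sheet
from tyssue.config.geometry import bulk_spec
from tyssue.generation import extrude

sheet = Sheet.planar_sheet_3d("sheet", 5, 5, 1, 1)
sheet.sanitize()
datasets = extrude(sheet.datasets, method="translation")
mono = Monolayer("mono", datasets, bulk_spec())

# after a topology change such as an IH transition, re-assign the segment
# of an affected vertex and face from the surrounding edges
mono.guess_vert_segment(0)
mono.guess_face_segment(0)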
10 changes: 9 additions & 1 deletion tyssue/core/objects.py
@@ -126,7 +126,10 @@ def __init__(self, identifier, datasets, specs=None, coords=None, maxbackup=5):
self.bbox = None
if "is_active" in self.vert_df.columns:
self.active_verts = self.vert_df[self.vert_df.is_active == 1].index
else:
self.active_verts = self.vert_df.index
self.set_bbox()
self.topo_changed = False

@property
def vert_df(self):
@@ -252,11 +255,13 @@ def update_num_faces(self):
self.cell_df["num_faces"] = self.edge_df.groupby("cell").apply(
lambda df: df["face"].unique().size
)
self.cell_df["num_ridges"] = self.edge_df.cell.value_counts()

def reset_topo(self):
"""Recomputes the number of sides for the faces and the
number of faces for the cells.
"""
log.debug("Resetting topology")
self.update_num_sides()
if "is_active" in self.vert_df.columns:
self.active_verts = self.vert_df[self.vert_df.is_active == 1].index
@@ -662,8 +667,11 @@ def set_bbox(self, margin=0.0):
)

def reset_index(self):
"""Resets the datasets indices to have continuous indices
"""Resets the datasets to have continuous indices
"""
log.debug("reseting index for %s", self.identifier)
self.topo_changed = True

new_vertidx = pd.Series(
np.arange(self.vert_df.shape[0]), index=self.vert_df.index
)
7 changes: 6 additions & 1 deletion tyssue/dynamics/factory.py
@@ -8,19 +8,24 @@
from ..utils import to_nd


def model_factory(effectors, ref_effector):
def model_factory(effectors, ref_effector=None):
"""Produces a Model class with the provided effectors.
Parameters
----------
effectors : list of :class:`.effectors.AbstractEffectors` classes.
ref_effector : optional, default None
if passed, will be used for normalization,
by default, the last effector in the list is used
Returns
-------
NewModel : a Model derived class with compute_energy and compute_gradient
methods
"""
if ref_effector is None:
ref_effector = effectors[-1]

class NewModel:

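With ref_effector now optional, a model can be assembled from the effector list alone; when it is omitted, the last effector is used for normalization. A minimal sketch, assuming the effector classes below are exposed by tyssue.dynamics.effectors at this version:

from tyssue.dynamics import effectors
from tyssue.dynamics.factory import model_factory

# ref_effector is left out, so FaceAreaElasticity (the last item in the
# list) is used as the normalization reference
model = model_factory(
    [effectors.LineTension, effectors.FaceContractility, effectors.FaceAreaElasticity]
)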
26 changes: 26 additions & 0 deletions tyssue/solvers/base.py
@@ -0,0 +1,26 @@
import logging

log = logging.getLogger(__name__)


class TopologyChangeError(ValueError):
""" Raised when trying to assign values without
the correct length to an epithelium dataset
"""

pass


def set_pos(eptm, geom, pos):
"""Updates the vertex position of the :class:`Epithelium` object.
Assumes that pos is passed as a 1D array to be reshaped as (eptm.Nv, eptm.dim)
"""
log.debug("set pos")
if eptm.topo_changed:
# reset the switch and interrupt what we were doing
eptm.topo_changed = False
raise TopologyChangeError
eptm.vert_df.loc[eptm.active_verts, eptm.coords] = pos.reshape((-1, eptm.dim))
geom.update_all(eptm)
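This new module gives solvers a clean way to abort when a topology change (which sets topo_changed, e.g. in reset_index) invalidates the flat position vector mid-minimization. A hedged sketch of how a caller might react; the find_energy_min callable is an assumption standing in for the actual solver entry point and is not part of this diff:

from tyssue.solvers.base import TopologyChangeError

def minimize_with_restarts(eptm, geom, model, find_energy_min, max_restarts=10):
    # retry whenever a topology change interrupts the minimization;
    # set_pos resets eptm.topo_changed before raising, so the next attempt
    # starts from the re-indexed configuration
    for _ in range(max_restarts):
        try:
            return find_energy_min(eptm, geom, model)
        except TopologyChangeError:
            continue
    raise RuntimeError("too many topology-change restarts")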