Merge branch 'master' of github.com:DamCB/tyssue
Showing 14 changed files with 2,247 additions and 112 deletions.
@@ -1,28 +1,13 @@
 from ..json_parser import load_spec
 import os


 CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))


-def core_spec():
-    """Solver default specification for scipy.optimize.minimize gradient
-    descent
-    .. code-block::
-        {
-        'face': {
-            'is_alive': true,
-            },
-        'edge': {
-            'face': 0,
-            'srce': 0,
-            'trgt': 0
-            },
-        'vert': {
-            'is_active': true,
-            }
-        }
-    """
-    specfile = os.path.join(CURRENT_DIR, 'core.json')
+def bulk_spec():
+    """
+    Default settings to perform edge subdivisions,
+    see tyssue.io.point_cloud.py
+    """
+    specfile = os.path.join(CURRENT_DIR, 'bulk.json')
     return load_spec(specfile)
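For context, the specification that the removed core_spec docstring illustrates is a nested mapping from element types ('face', 'edge', 'vert') to default column values. Below is a minimal sketch of how such a spec file might be read, assuming load_spec does little more than deserialize a JSON file into a nested dict (the actual tyssue json_parser implementation may differ):

import json

def load_spec(specfile):
    # hypothetical stand-in for tyssue's json_parser.load_spec:
    # read a JSON spec file and return it as a nested dict
    with open(specfile) as fh:
        return json.load(fh)

# a core-like spec, mirroring the dict shown in the removed docstring
core_like = {
    "face": {"is_alive": True},
    "edge": {"face": 0, "srce": 0, "trgt": 0},
    "vert": {"is_active": True},
}
with open("core_example.json", "w") as fh:
    json.dump(core_like, fh)

spec = load_spec("core_example.json")
print(spec["edge"]["srce"])  # default value for the edge 'srce' column -> 0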
@@ -0,0 +1,13 @@
import os
from ..json_parser import load_spec


CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))


def bulk_spec():
    """
    Default settings to perform edge subdivisions,
    see tyssue.io.point_cloud.py
    """
    specfile = os.path.join(CURRENT_DIR, 'bulk.json')
    return load_spec(specfile)
@@ -0,0 +1,7 @@
{"start": 0.0,
 "stop": 1.0,
 "gamma": 1.0,
 "density": 4,
 "noise": 0,
 "scale": 1}
@@ -0,0 +1,31 @@
import pandas as pd
import numpy as np


def write_storm_csv(filename, points,
                    coords=['x', 'y', 'z'],
                    split_by=None, **csv_args):
    """
    Saves a point cloud array in the storm format
    """
    columns = ['frame', 'x [nm]', 'y [nm]', 'z [nm]',
               'uncertainty_xy', 'uncertainty_z']
    points = points.dropna()
    storm_points = pd.DataFrame(np.zeros((points.shape[0], 6)),
                                columns=columns)
    storm_points[['x [nm]', 'y [nm]', 'z [nm]']] = points[coords].values
    storm_points['frame'] = 1
    storm_points[['uncertainty_xy',
                  'uncertainty_z']] = 2.1
    # tab separated values are faster and more portable than excel
    if split_by is None:
        if not filename.endswith('.csv'):
            filename = filename + '.csv'
        storm_points.to_csv(filename, **csv_args)
    elif split_by in points.columns:  # DataFrame.columns is an attribute, not a method
        storm_points[split_by] = points[split_by]
        # write one file per value of the split_by column
        storm_points.groupby(split_by).apply(
            lambda df: df.to_csv('{}_{}.csv'.format(
                filename, df[split_by].iloc[0]),
                **csv_args))
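A quick usage sketch for the new writer, with a made-up point cloud; the coordinate column names and the optional split_by grouping follow the signature above:

import numpy as np
import pandas as pd

# fake point cloud: 100 points with x, y, z coordinates and a label column
points = pd.DataFrame(np.random.random((100, 3)), columns=['x', 'y', 'z'])
points['face'] = np.random.randint(0, 4, size=100)

# single output file: cloud.csv
write_storm_csv('cloud', points)

# one file per face label: cloud_0.csv, cloud_1.csv, ...
write_storm_csv('cloud', points, split_by='face')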
Empty file.