Commit e309f4f

Add 'TimeSeriesReader' for stream input
* Fix LEMS path-to-cell parsing: implicitly set fractionAlong = 0.5 when not even the segment is specified
1 parent b666d1c commit e309f4f

13 files changed: +826 -173 lines changed

CHANGELOG.txt

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 Latest
 
+- Add experimental extension to NeuroML: 'TimeSeriesReader' for streaming time series data from external sources into the simulation. Access to these time series is through VariableRequirements, for now.
 - Add experimental extension to LEMS: 'VariableRequirement' for CoreNEURON-style POINTERs, which is accessible through the experimental "EdenCustomSetup" file format
 - Add experimental file format for direct, per-instance customization of model parameters
 - Eliminate LEMS constants from the model's memory footprint

VERSION

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-0.2.3a1
+0.2.3a3

eden/Common.h

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,9 @@ static inline std::string accurate_string( double val ){
133133
return tmps;
134134
}
135135

136+
// Split URL into scheme and auth+path, if scheme is present (otherwise it's a "URL reference")
137+
bool GetUrlScheme(const std::string &url, std::string &scheme, std::string &auth_path);
138+
136139
// Tokenize a string, as with String.split() in string-capable languages
137140
std::vector<std::string> string_split(const std::string& str, const std::string& delim);
138141
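Only the declaration is added here; the definition lands in eden/Eden.cpp, whose large diff is not rendered below. As a rough idea of the contract, here is a minimal sketch, assuming (from the comment above and from the scheme-less fallback in NeuroML.cpp) that inputs without a scheme are reported as URL references and left for the caller to treat as file paths; the actual implementation may differ:

#include <cctype>
#include <string>

// Sketch of the assumed contract, not the actual definition from eden/Eden.cpp.
bool GetUrlScheme_sketch(const std::string &url, std::string &scheme, std::string &auth_path){
	size_t colon = url.find(':');
	if(colon == std::string::npos || colon == 0) return false; // no scheme: a "URL reference"
	if(!isalpha((unsigned char)url[0])) return false; // RFC 3986: a scheme starts with a letter
	for(size_t i = 1; i < colon; i++){
		char c = url[i];
		if(!( isalnum((unsigned char)c) || c == '+' || c == '-' || c == '.' )) return false;
	}
	// NB a single-letter scheme is ambiguous with a Windows drive letter ("C:\...");
	// the real implementation may resolve that case differently.
	scheme = url.substr(0, colon);
	auth_path = url.substr(colon + 1); // e.g. "//host/path" out of "file://host/path"
	return true;
}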

eden/Eden.cpp

Lines changed: 424 additions & 76 deletions
Large diffs are not rendered by default.

eden/NeuroML.cpp

Lines changed: 159 additions & 30 deletions
@@ -133,6 +133,7 @@ std::string GetRelativeFilePath(const std::string &origin_path, const std::strin
 	// For each token, split by slash (may contain backslashes)
 	for( const auto token : tokens ){
 		// For each sub-token, split by backslash
+		// TODO for windows only?
 		for( const auto new_token : string_split( token, "\\") ){
 			new_tokens.push_back(new_token);
 		}
@@ -1052,6 +1053,13 @@ bool Model::GetLemsQuantityPathType(const Network &net, const Simulation::LemsQu
 		
 		return GetLemsQuantityPathType_InputInstance(path.input, input, type, dimension);
 	}
+	// experimental extensions
+	else if(path.type == Simulation::LemsQuantityPath::DATAREADER){
+		const Network::TimeSeriesReader &reader = net.data_readers.get(path.reader.read_seq);
+		const Network::TimeSeriesReader::InputColumn &column = reader.columns.get(path.reader.colu_seq);
+		type = NamespaceThing::STATE; dimension = column.dimension;
+		return true;
+	}
 	else if(path.type == Simulation::LemsQuantityPath::NETWORK){
 		// not supported yet!
 		return false;
@@ -1217,6 +1225,11 @@ bool Model::LemsQuantityPathToString(const Network &net, const Simulation::LemsQ
 		const InputSource &input = input_sources.get(list.component);
 		return LemsQuantityPathToString(input, path.input, ret);
 	}
+	// experimental extensions
+	else if(type == Path::DATAREADER){
+		ret += net.data_readers.getName(path.reader.read_seq) + ("["+accurate_string(path.reader.inst_seq)+"]/") + net.data_readers.get(path.reader.read_seq).columns.getName(path.reader.colu_seq);
+		return true;
+	}
 	else{
 		printf("path to string: type %d not supported yet\n", (int)type);
 		return false;
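For reference, the string produced above takes the form readerName[instanceIndex]/columnName; for a hypothetical reader named stimuli with an InputColumn named current, instance 3 would be rendered as stimuli[3]/current.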
@@ -1372,6 +1385,7 @@ bool Model::ParseLemsSegmentLocator(const ILogProxy &log, const std::vector<std:
 	if( morph.segments.size() == 1 ){
 		// set it to default
 		segment_id = 0;
+		path.fractionAlong = 0.5;
 	}
 	else{
 		std::string complaint = "target path needs segment ID, because cell has multiple segments. Setting to implicit default: segment ID = 0";
@@ -1382,6 +1396,7 @@ bool Model::ParseLemsSegmentLocator(const ILogProxy &log, const std::vector<std:
 		// TODO perhaps stop complaining after warning too many times
 		
 		segment_id = 0;
+		path.fractionAlong = 0.5;
 	}
 }
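In both fallback branches above, the segment defaults to 0 and fractionAlong to 0.5 (the midpoint of the segment): for example, a path such as hhpop[0]/v (hypothetical population name) that names neither a segment nor a fraction now resolves to the middle of segment 0.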

@@ -1863,6 +1878,42 @@ bool Model::ParseLemsQuantityPath(const ILogProxy &log, const char *qty_str, con
 		return ParseLemsQuantityPath_SynapticComponent(log, syn, tokens, path.synapse, tokens_consumed);
 		// FIXME record a synapse to make sure it works
 	}
+	// experimental extensions
+	else if( (group_seq = net.data_readers.get_id(sId)) >= 0 ){
+		path.type = Simulation::LemsQuantityPath::DATAREADER;
+		const Network::TimeSeriesReader &reader = net.data_readers.get(group_seq);
+		
+		path.reader.read_seq = group_seq;
+		if(!ParseLemsGroupLocator(log, tokens, "data reader", net.data_readers, [](const auto &reader, Int id){return ( 0 <= id && id < reader.instances ) ? id : -1 ;}, path.reader.read_seq, path.reader.inst_seq, tokens_consumed)) return false;
+		// TODO add shortcut without group locator for single instance?
+		
+		// now branch according to property type
+		if(!(tokens_consumed+1 <= (Int)tokens.size())){
+			// if the elements have just one property, its name can be skipped
+			if(reader.columns.size() == 1){
+				path.reader.colu_seq = 0;
+				return true;
+			}
+			else{
+				log.error("incomplete path for datareader element");
+				return false;
+			}
+		}
+		else{
+			const char *sProp = tokens[tokens_consumed].c_str(); tokens_consumed++;
+			if(!(tokens_consumed == (Int)tokens.size())){
+				log.error("path for datareader element too large");
+				return false;
+			}
+			
+			path.reader.colu_seq = reader.columns.get_id(sProp);
+			if(path.reader.colu_seq < 0){
+				log.error("property %s not found in datareader %s", sProp, net.data_readers.getName(group_seq));
+				return false;
+			}
+			return true;
+		}
+	}
 	// parse them for upcoming refs
 	// and get type!
 	// FIXME fractionAlong for transmission !
@@ -2514,6 +2565,24 @@ bool ParseAcrossSegOrSegGroup(const ImportLogger &log, const pugi::xml_node &eAp
 	}
 }
 
+// get the scaling factor that applies, compared to native units, for that unit name
+bool ValidateGetUnits(const ILogProxy &log, const DimensionSet &dimensions, const Dimension &dimension, const char *unit_name, LemsUnit &units){
+	if(!dimensions.Has(dimension)){
+		log.error("there are no specified %s units", dimensions.Stringify(dimension).c_str() );
+		return false;
+	}
+	for( auto scale : dimensions.GetUnits(dimension) ){
+		if(strcmp(unit_name, scale.name.c_str()) == 0){
+			units = scale;
+			return true;
+		}
+	}
+	//unit name not found in list!
+	std::string known_list; for( auto scale : dimensions.GetUnits(dimension) ) known_list += " ", known_list += scale.name;
+	log.error("unknown units: %s for %s (supported:%s)", unit_name, dimensions.Stringify(dimension).c_str(), known_list.c_str() );
+	return false;
+}
+
 //NeuroML physical quantities consist of a numeric, along with an unit name (such as meter, kilometer, etc.) qualifying the quantity the numeric represents. So NeuroML reader code has to check the unit name, to properly read the quantity.
 template<typename UnitType>
 bool ParseQuantity(const ImportLogger &log, const pugi::xml_node &eLocation, const char *attr_name, Real &num){
@@ -2599,26 +2668,29 @@ bool ParseLemsQuantity(const ImportLogger &log, const pugi::xml_node &eLocation,
 		return false;
 	}
 	//then get the scaling factor that applies, compared to native units, for that unit name
-	if(!dimensions.Has(dimension)){
-		log.error(eLocation, "there are no specified %s units for attribute %s", dimensions.Stringify(dimension).c_str(), attr_name );
+	LemsUnit scale = dimensions.GetNative(Dimension::Unity());
+	if(!ValidateGetUnits(LogWithElement(log,eLocation), dimensions, dimension, unit_name, scale)) return false;
+	num = scale.ConvertTo( pure_number, dimensions.GetNative(dimension) );
+	//printf("valll %f\n",num);
+	return true;
+}
+
+bool ParseDimensionAttribute(const ImportLogger &log, const pugi::xml_node &eTag, const DimensionSet &dimensions, Dimension &dimension, const char *sAttributeName = "dimension"){
+	auto sDimension = eTag.attribute(sAttributeName).value();
+	if(!*sDimension){
+		log.error(eTag, "%s attribute missing", sAttributeName); // TODO RequiredAttribute ?
 		return false;
 	}
-	for( auto scale : dimensions.GetUnits(dimension) ){
-		if(strcmp(unit_name, scale.name.c_str()) == 0){
-			
-			num = scale.ConvertTo( pure_number, dimensions.GetNative(dimension) );
-			//printf("valll %f\n",num);
-			return true;
-		}
-	}
-	//unit name not found in list!
-	std::string known_list; for( auto scale : dimensions.GetUnits(dimension) ) known_list += " ", known_list += scale.name;
-	log.error(eLocation, "unknown %s attribute units: %s for %s (supported:%s)", attr_name, unit_name, dimensions.Stringify(dimension).c_str(), known_list.c_str() );
-	return false;
 	
+	// find the dimension
+	if( !dimensions.Has(sDimension) ){
+		log.error(eTag, "unknown dimension %s", sDimension);
+		return false;
+	}
+	dimension = dimensions.Get(sDimension);
+	return true;
 }
 
-
 template<typename UnitType>
 bool ParseValueAcrossSegOrSegGroup(const ImportLogger &log, const pugi::xml_node &eAppliedOn, const char *attr_name, const Morphology &morph , ValueAcrossSegOrSegGroup &applied_on){

@@ -5844,6 +5916,74 @@ struct ImportState{
 			}
 			net.input_lists.add(list, list_name);
 		}
+		// extensions!
+		else if(strcmp(eNetEl.name(), "EdenTimeSeriesReader") == 0){
+			const auto &eRea = eNetEl;
+			
+			auto name = RequiredNmlId(log, eRea);
+			if(!name) return false;
+			if(net.data_readers.has(name)){
+				log.error(eRea, "%s %s already defined", eRea.name(), name);
+				return false;
+			}
+			
+			Network::TimeSeriesReader reader;
+			
+			const char *sHref = RequiredAttribute( log, eRea, "href");
+			if(!sHref) return false;
+			auto &url = reader.source_url;
+			url = sHref; // NB validate on backend, until the url format is standardized LATER (or even better, leave it up to the backend?)
+			
+			// XXX if it's a file url, much like the current behaviour of OutputFile, it's relative to the cwd of the program, not the file being parsed! TODO specify a way to select how to behave, or at least document -- filename
+			// here's a trick: use "no uri scheme" to mean "relative to xml file" and "file://" to mean "relative to working directory", TODO document it.
+			std::string scheme, auth_path;
+			if(!GetUrlScheme(url, scheme, auth_path)){
+				const char *loading_from_file = log.GetFilenameFromElement(eRea);
+				url = GetRelativeFilePath((loading_from_file ? loading_from_file : "."), url);
+			}
+			// else keep the url as is
+			
+			const char *sFormat = RequiredAttribute( log, eRea, "format");
+			if(!sFormat) return false;
+			reader.data_format = sFormat; // NB validate on backend, until the url format is standardized LATER (or even better, leave it up to the backend?)
+			
+			const char *sInstances = RequiredAttribute( log, eRea, "instances");
+			if(!sInstances) return false;
+			if(!( StrToL(sInstances, reader.instances) && reader.instances > 0 )){
+				log.error(eRea, " \"instances\" must be a positive integer, not %s", sInstances);
+				return false;
+			}
+			
+			for(const auto &eReaEl : eRea.children()){
+				if( strcmp(eReaEl.name(), "InputColumn") == 0 ){
+					
+					auto name = RequiredNmlId(log, eReaEl);
+					if(!name) return false;
+					if(reader.columns.has(name)){
+						log.error(eReaEl, "%s %s already defined in %s", eReaEl.name(), name, eRea.name());
+						return false;
+					}
+					
+					Network::TimeSeriesReader::InputColumn column = {Dimension::Unity(), dimensions.GetNative(Dimension::Unity())};
+					if(!ParseDimensionAttribute(log, eReaEl, dimensions, column.dimension)) return false;
+					const char *sUnits = RequiredAttribute( log, eReaEl, "units"); if(!sUnits) return false;
+					if(!ValidateGetUnits(LogWithElement(log,eReaEl), dimensions, column.dimension, sUnits, column.units)) return false;
+					
+					reader.columns.add(column, name); // yay!
+				}
+				else{
+					// unknown, ignore
+				}
+			}
+			
+			// one last consistency check
+			if(reader.columns.contents.empty()){
+				log.error(eRea, "%s must have one or more <InputColumn>s", eRea.name());
+				return false;
+			}
+			
+			net.data_readers.add(reader, name); // yay!
+		}
 		else{
 			// unknown, ignore
 		}
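For reference, a sketch of what the new element might look like inside a NeuroML network, kept here as a C++ string constant. Only the element and attribute names (EdenTimeSeriesReader, id, href, format, instances, InputColumn, dimension, units) come from the parser above; every concrete value is a made-up placeholder, since href and format are only validated by the backend:

// Hypothetical example, not documented syntax: all attribute values are placeholders.
// Per the comments above, an href without a URI scheme is resolved relative to the NeuroML
// file being parsed, while an href with a scheme such as "file://" is kept as-is.
static const char *example_reader_xml = R"(
	<EdenTimeSeriesReader id="stimuli" href="stimuli.txt" format="ascii_v0" instances="4">
		<InputColumn id="current"     dimension="current"     units="nA"/>
		<InputColumn id="conductance" dimension="conductance" units="uS"/>
	</EdenTimeSeriesReader>
)";

Columns of such a reader are then reachable through LEMS quantity paths as stringified above; per the changelog entry, access from models is through VariableRequirements for now.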
@@ -6640,7 +6780,7 @@ struct ImportState{
 	std::string filename_s = GetRelativeFilePath((loading_from_file ? loading_from_file : "."), sRelFilename);
 	const char *filename = filename_s.c_str();
 	
-	std::ifstream fin(filename, std::ios::binary);
+	std::ifstream fin(filename, std::ios::binary); // binary mode, to count CRLF as two bytes
 	// check if opening a file failed
 	if (fin.fail()) {
 		log.error(eSimEl, "could not open file \"%s\": %s", filename, strerror(errno));
@@ -6823,11 +6963,11 @@ struct ImportState{
 		log.error(lineno, units_token, "there are no specified %s units", dimensions.Stringify(dimension).c_str() );
 		return false;
 	}
-	bool units_ok = false;
+	bool units_ok = false; // TODO ValidateGetUnits
 	for( auto scale : dimensions.GetUnits(dimension) ){
 		if(unit_name == scale.name){
 			real_value_units = scale;
-			value = scale.ConvertTo( value, dimensions.GetNative(dimension) ); // adjust units right away, if it's a single value. If it's multi, ajust as the values rows are being read.
+			value = scale.ConvertTo( value, dimensions.GetNative(dimension) ); // adjust units right away, if it's a single value. If it's multi, adjust as the values rows are being read.
 			units_ok = true;
 			break;
 		}
@@ -8061,18 +8201,7 @@ struct ImportState{
 	};
 	
 	auto ParseBaseNamedProperty = [ &dimensions = dimensions ](const ImportLogger &log, const pugi::xml_node &eProp, ComponentType::BaseNamedProperty &prop_record){
-		auto dimension = eProp.attribute("dimension").value();
-		if(!*dimension){
-			log.error(eProp, "dimension attribute missing");
-			return false;
-		}
-		
-		// find the dimension
-		if( !dimensions.Has(dimension) ){
-			log.error(eProp, "unknown dimension %s", dimension);
-			return false;
-		}
-		prop_record.dimension = dimensions.Get(dimension);
+		if(!ParseDimensionAttribute(log, eProp, dimensions, prop_record.dimension)) return false;
 		return true;
 	};
