Skip to content

Commit

Permalink
Some refactorings
Browse files Browse the repository at this point in the history
  • Loading branch information
rnebot committed Apr 19, 2020
1 parent f5bebb9 commit 4f9af3f
Show file tree
Hide file tree
Showing 6 changed files with 33 additions and 25 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ FROM python:3.7.3-slim-stretch
# 2) Create, run and remove on exit, using autogenerated configuration (NO NEED FOR STEPS 3, 4)
# docker run --rm -p 8085:80 magicnexush2020/magic-nis-backend:latest
#
# 3) docker cp nis_docker_naples.conf /app/backend/restful_service/nis_docker_naples.conf
# 3) docker cp nis_docker_naples.conf /app/nexinfosys/restful_service/nis_docker_naples.conf
#
# 4) docker start nis-local && docker logs nis-local -f
#
Expand Down
13 changes: 8 additions & 5 deletions installer_cmd_maker.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,10 +53,13 @@ def elaborate_pyinstaller_command_line(system, output_name, output_type, config_
output_type_option = ""

cmd = f"""
<Install a Python environment>
<In Windows, install Visual Studio Community>
<Install a Python environment (tested with Anaconda)>
<In Windows, execute the following commands in a special BASH shell, available after installing "gitforwindows">
git clone ...
git clone https://github.com/MAGIC-nexus/nis-backend
<In Windows, execute the following commands in a special BASH shell, available after installing "gitforwindows". Run as Administrator>
<Execute the following two "pip" lines IF it is the first time OR if there is an update in requirements.txt or in "pyinstaller">
<In Windows: conda install -c conda-forge fiona> (the first time)>
pip install -r requirements.txt
pip install pyinstaller
Expand Down Expand Up @@ -108,14 +111,14 @@ def elaborate_pyinstaller_command_line(system, output_name, output_type, config_
<Modify config file. Different in Windows and Linux>
{set_var} MAGIC_NIS_SERVICE_CONFIG_FILE={config_file_name} {lsep}
{set_var} MAGIC_NIS_SERVICE_CONFIG_FILE={config_file_name.replace("/", sep)} {lsep}
dist{sep}{output_name}{sep+output_name if output_type == "onedir" else ""}
"""
return cmd


if __name__ == '__main__':
cfg_file = "/home/rnebot/Dropbox/nis-backend-config/nis_local.conf"
system_type = "linux" # "linux", "windows", "macosx", None (autodetect)
dist_type = "onedir" # "onedir", "onefile"
system_type = "windows" # "linux", "windows", "macosx", None (autodetect)
dist_type = "onefile" # "onedir", "onefile"
print(elaborate_pyinstaller_command_line(system_type, "nis-backend", dist_type, cfg_file))
27 changes: 15 additions & 12 deletions nexinfosys/model_services/workspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -358,18 +358,7 @@ def obtain_problem_statement(dynamic_scenario_parameters: Dict = None) -> Proble
problem_statement = obtain_problem_statement(dynamic_scenario_parameters)

# Obtain "parameters" Dataset
params_keys = []
params_data = []
for scenario_name, scenario_exp_params in problem_statement.scenarios.items(): # type: str, dict
p = evaluate_parameters_for_scenario(global_parameters, scenario_exp_params)
for k, v in p.items():
params_keys.append((scenario_name, k))
params_data.append(v)

df = pd.DataFrame(params_data,
index=pd.MultiIndex.from_tuples(params_keys, names=["Scenario", "Parameter"]),
columns=["Value"])
datasets["params"] = get_dataset(df, "params", "Parameter values per Scenario")
datasets["params"] = obtain_parameters_dataset(global_parameters, problem_statement)

solver_type_param = glb_idx.get(Parameter.partial_key("NISSolverType"))
solver_type_param = solver_type_param[0]
Expand Down Expand Up @@ -465,6 +454,20 @@ def prepare_model(state) -> NoReturn:
glb_idx.put(fr.key(), fr)


def obtain_parameters_dataset(global_parameters: List[Parameter], problem_statement: ProblemStatement):
    """
    Build the "params" dataset: one row per (Scenario, Parameter) holding the
    evaluated parameter value for that scenario.

    :param global_parameters: Declared model parameters (defaults, expressions)
    :param problem_statement: Scenarios, each mapping parameter names to expressions
    :return: Dataset (via get_dataset) indexed by a (Scenario, Parameter) MultiIndex,
             with a single "Value" column
    """
    rows = []  # accumulate (scenario_name, parameter_name, evaluated_value) triples
    for scenario, expression_params in problem_statement.scenarios.items():  # type: str, dict
        evaluated = evaluate_parameters_for_scenario(global_parameters, expression_params)
        rows.extend((scenario, name, value) for name, value in evaluated.items())

    # Two parallel projections of "rows": the (Scenario, Parameter) index and the values
    index = pd.MultiIndex.from_tuples([(s, n) for s, n, _ in rows],
                                      names=["Scenario", "Parameter"])
    df = pd.DataFrame([v for _, _, v in rows], index=index, columns=["Value"])
    return get_dataset(df, "params", "Parameter values per Scenario")

# #####################################################################################################################
# >>>> INTERACTIVE SESSION <<<<
# #####################################################################################################################
Expand Down
6 changes: 3 additions & 3 deletions nexinfosys/restful_service/service_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -1183,13 +1183,13 @@ def query_state_list_results(isess):
for f in dataset_formats],
) for k in datasets
] +
[dict(name="FG",
[dict(name="interfaces_graph",
type="graph",
description="Graph of Interfaces, Quantities; Scales and Exchanges",
formats=[dict(format=f,
url=nis_api_base + F"/isession/rsession/state_query/flow_graph.{f.lower()}")
for f in graph_formats]),
dict(name="PG",
dict(name="processors_graph",
type="graph",
description="Processors and exchanges graph",
formats=[dict(format=f,
Expand All @@ -1203,7 +1203,7 @@ def query_state_list_results(isess):
dict(format=f, url=nis_api_base + F"/isession/rsession/state_query/sankey_graph.{f.lower()}")
for f in ["JSON"]]),
] +
[dict(name="P_GIS",
[dict(name="processors_geolayer",
type="geolayer",
description="Processors",
formats=[
Expand Down
6 changes: 3 additions & 3 deletions nexinfosys/solving/flow_graph_solver.py
Original file line number Diff line number Diff line change
Expand Up @@ -1178,25 +1178,25 @@ def export_solver_data(datasets, data, dynamic_scenario, glb_idx, global_paramet

if not dynamic_scenario:
ds_name = "flow_graph_solution"
ds_flows_name = "flow_graph_edges_matrix"
ds_flows_name = "flow_graph_solution_edges"
ds_indicators_name = "flow_graph_solution_indicators"
df_global_indicators_name = "flow_graph_global_indicators"
ds_benchmarks_name = "flow_graph_solution_benchmarks"
ds_global_benchmarks_name = "flow_graph_solution_global_benchmarks"
ds_stakeholders_name = "benchmarks_and_stakeholders"
else:
ds_name = "dyn_flow_graph_solution"
ds_flows_name = "dyn_flow_graph_edges_matrix"
ds_flows_name = "dyn_flow_graph_solution_edges"
ds_indicators_name = "dyn_flow_graph_solution_indicators"
df_global_indicators_name = "dyn_flow_graph_global_indicators"
ds_benchmarks_name = "dyn_flow_graph_solution_benchmarks"
ds_global_benchmarks_name = "dyn_flow_graph_solution_global_benchmarks"
ds_stakeholders_name = "benchmarks_and_stakeholders"

for d, name, label in [(df, ds_name, "Flow Graph Solver - Interfaces"),
(ds_flow_values, ds_flows_name, "Flow Graph Solver Edges - Interfaces"),
(df_local_indicators, ds_indicators_name, "Flow Graph Solver - Local Indicators"),
(df_global_indicators, df_global_indicators_name, "Flow Graph Solver - Global Indicators"),
(ds_flow_values, ds_flows_name, "Flow Graph Edges Matrix - Interfaces"),
(ds_benchmarks, ds_benchmarks_name, "Flow Graph Solver - Local Benchmarks"),
(ds_global_benchmarks, ds_global_benchmarks_name, "Flow Graph Solver - Global Benchmarks"),
(ds_stakeholders, ds_stakeholders_name, "Benchmarks - Stakeholders")
Expand Down
4 changes: 3 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,12 @@ gunicorn==19.9.0
Cython==0.29.7
# WINDOWS: REMOVE and USE "conda install -c conda-forge python-blosc" (install "gitforwindows", then open terminal using "Run as Administrator")
blosc>=1.8.1
brightway2==2.3
# Commented because no LCA dataset was made available during the lifetime of the project, found no way to integrate with this powerful LCA package
# brightway2==2.3
matplotlib>=3.0.3
#psycopg2==2.7.3.2 # Removed because it requires having PostgreSQL installed. It is explicitly in the Dockerfile
webdavclient==1.0.8
# May be troublesome in generating Windows executable
owlready2==0.23
celery>=4.3.0
pykml==0.1.3
Expand Down

0 comments on commit 4f9af3f

Please sign in to comment.