diff --git a/Dockerfile b/Dockerfile
index 0bee072..51fc140 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -18,7 +18,7 @@ FROM python:3.7.3-slim-stretch
 # 2) Create, run and remove on exit, using autogenerated configuration (NO NEED FOR STEPS 3, 4)
 #    docker run --rm -p 8085:80 magicnexush2020/magic-nis-backend:latest
 #
-# 3) docker cp nis_docker_naples.conf /app/backend/restful_service/nis_docker_naples.conf
+# 3) docker cp nis_docker_naples.conf /app/nexinfosys/restful_service/nis_docker_naples.conf
 #
 # 4) docker start nis-local && docker logs nis-local -f
 #
diff --git a/installer_cmd_maker.py b/installer_cmd_maker.py
index 6c657dd..42b8e67 100644
--- a/installer_cmd_maker.py
+++ b/installer_cmd_maker.py
@@ -53,10 +53,13 @@ def elaborate_pyinstaller_command_line(system, output_name, output_type, config_
         output_type_option = ""
 
     cmd = f"""
-
+
+
-git clone ...
+git clone https://github.com/MAGIC-nexus/nis-backend
+
+
 (the first time)> pip install -r requirements.txt
 pip install pyinstaller
@@ -108,7 +111,7 @@ def elaborate_pyinstaller_command_line(system, output_name, output_type, config_
 
-{set_var} MAGIC_NIS_SERVICE_CONFIG_FILE={config_file_name} {lsep}
+{set_var} MAGIC_NIS_SERVICE_CONFIG_FILE={config_file_name.replace("/", sep)} {lsep}
 dist{sep}{output_name}{sep+output_name if output_type == "onedir" else ""}
 """
     return cmd
@@ -116,6 +119,6 @@ def elaborate_pyinstaller_command_line(system, output_name, output_type, config_
 
 if __name__ == '__main__':
     cfg_file = "/home/rnebot/Dropbox/nis-backend-config/nis_local.conf"
-    system_type = "linux"  # "linux", "windows", "macosx", None (autodetect)
-    dist_type = "onedir"  # "onedir", "onefile"
+    system_type = "windows"  # "linux", "windows", "macosx", None (autodetect)
+    dist_type = "onefile"  # "onedir", "onefile"
     print(elaborate_pyinstaller_command_line(system_type, "nis-backend", dist_type, cfg_file))
diff --git a/nexinfosys/model_services/workspace.py b/nexinfosys/model_services/workspace.py
index 3233cff..5e3e552 100644
--- a/nexinfosys/model_services/workspace.py
+++ b/nexinfosys/model_services/workspace.py
@@ -358,18 +358,7 @@ def obtain_problem_statement(dynamic_scenario_parameters: Dict = None) -> Proble
     problem_statement = obtain_problem_statement(dynamic_scenario_parameters)
 
     # Obtain "parameters" Dataset
-    params_keys = []
-    params_data = []
-    for scenario_name, scenario_exp_params in problem_statement.scenarios.items():  # type: str, dict
-        p = evaluate_parameters_for_scenario(global_parameters, scenario_exp_params)
-        for k, v in p.items():
-            params_keys.append((scenario_name, k))
-            params_data.append(v)
-
-    df = pd.DataFrame(params_data,
-                      index=pd.MultiIndex.from_tuples(params_keys, names=["Scenario", "Parameter"]),
-                      columns=["Value"])
-    datasets["params"] = get_dataset(df, "params", "Parameter values per Scenario")
+    datasets["params"] = obtain_parameters_dataset(global_parameters, problem_statement)
 
     solver_type_param = glb_idx.get(Parameter.partial_key("NISSolverType"))
     solver_type_param = solver_type_param[0]
@@ -465,6 +454,20 @@ def prepare_model(state) -> NoReturn:
             glb_idx.put(fr.key(), fr)
 
+
+def obtain_parameters_dataset(global_parameters: List[Parameter], problem_statement: ProblemStatement):
+    params_keys = []
+    params_data = []
+    for scenario_name, scenario_exp_params in problem_statement.scenarios.items():  # type: str, dict
+        p = evaluate_parameters_for_scenario(global_parameters, scenario_exp_params)
+        for k, v in p.items():
+            params_keys.append((scenario_name, k))
+            params_data.append(v)
+
+    df = pd.DataFrame(params_data,
+                      index=pd.MultiIndex.from_tuples(params_keys, names=["Scenario", "Parameter"]),
+                      columns=["Value"])
+    return get_dataset(df, "params", "Parameter values per Scenario")
+
 # #####################################################################################################################
 # >>>> INTERACTIVE SESSION <<<<
 # #####################################################################################################################
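Note on the workspace.py hunks above: the inline construction of the "params" dataset is extracted into a reusable obtain_parameters_dataset() helper. Below is a minimal standalone sketch of the pandas pattern that helper relies on, one (Scenario, Parameter) row per evaluated value; the scenario and parameter names are hypothetical, for illustration only.

```python
# Sketch of the MultiIndex layout built by obtain_parameters_dataset().
# The sample names below are hypothetical, not taken from the codebase.
import pandas as pd

params_keys = [("Base", "growth_rate"), ("Base", "efficiency"), ("High", "growth_rate")]
params_data = [0.02, 0.85, 0.05]

df = pd.DataFrame(params_data,
                  index=pd.MultiIndex.from_tuples(params_keys, names=["Scenario", "Parameter"]),
                  columns=["Value"])
print(df.loc[("Base", "growth_rate"), "Value"])  # -> 0.02
```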
diff --git a/nexinfosys/restful_service/service_main.py b/nexinfosys/restful_service/service_main.py
index c144e48..7fd45ac 100644
--- a/nexinfosys/restful_service/service_main.py
+++ b/nexinfosys/restful_service/service_main.py
@@ -1183,13 +1183,13 @@ def query_state_list_results(isess):
                        for f in dataset_formats],
               )
          for k in datasets
         ] +
-        [dict(name="FG",
+        [dict(name="interfaces_graph",
               type="graph",
               description="Graph of Interfaces, Quantities; Scales and Exchanges",
               formats=[dict(format=f,
                             url=nis_api_base + F"/isession/rsession/state_query/flow_graph.{f.lower()}")
                        for f in graph_formats]),
-         dict(name="PG",
+         dict(name="processors_graph",
               type="graph",
               description="Processors and exchanges graph",
               formats=[dict(format=f,
@@ -1203,7 +1203,7 @@ def query_state_list_results(isess):
                   dict(format=f,
                        url=nis_api_base + F"/isession/rsession/state_query/sankey_graph.{f.lower()}")
                   for f in ["JSON"]]),
         ] +
-        [dict(name="P_GIS",
+        [dict(name="processors_geolayer",
               type="geolayer",
               description="Processors",
               formats=[
diff --git a/nexinfosys/solving/flow_graph_solver.py b/nexinfosys/solving/flow_graph_solver.py
index ab2813c..b62537c 100644
--- a/nexinfosys/solving/flow_graph_solver.py
+++ b/nexinfosys/solving/flow_graph_solver.py
@@ -1178,7 +1178,7 @@ def export_solver_data(datasets, data, dynamic_scenario, glb_idx, global_paramet
 
     if not dynamic_scenario:
         ds_name = "flow_graph_solution"
-        ds_flows_name = "flow_graph_edges_matrix"
+        ds_flows_name = "flow_graph_solution_edges"
         ds_indicators_name = "flow_graph_solution_indicators"
         df_global_indicators_name = "flow_graph_global_indicators"
         ds_benchmarks_name = "flow_graph_solution_benchmarks"
@@ -1186,7 +1186,7 @@ def export_solver_data(datasets, data, dynamic_scenario, glb_idx, global_paramet
         ds_stakeholders_name = "benchmarks_and_stakeholders"
     else:
         ds_name = "dyn_flow_graph_solution"
-        ds_flows_name = "dyn_flow_graph_edges_matrix"
+        ds_flows_name = "dyn_flow_graph_solution_edges"
         ds_indicators_name = "dyn_flow_graph_solution_indicators"
         df_global_indicators_name = "dyn_flow_graph_global_indicators"
         ds_benchmarks_name = "dyn_flow_graph_solution_benchmarks"
@@ -1194,9 +1194,9 @@ def export_solver_data(datasets, data, dynamic_scenario, glb_idx, global_paramet
         ds_stakeholders_name = "benchmarks_and_stakeholders"
 
     for d, name, label in [(df, ds_name, "Flow Graph Solver - Interfaces"),
+                           (ds_flow_values, ds_flows_name, "Flow Graph Solver Edges - Interfaces"),
                            (df_local_indicators, ds_indicators_name, "Flow Graph Solver - Local Indicators"),
                            (df_global_indicators, df_global_indicators_name, "Flow Graph Solver - Global Indicators"),
-                           (ds_flow_values, ds_flows_name, "Flow Graph Edges Matrix - Interfaces"),
                            (ds_benchmarks, ds_benchmarks_name, "Flow Graph Solver - Local Benchmarks"),
                            (ds_global_benchmarks, ds_global_benchmarks_name, "Flow Graph Solver - Global Benchmarks"),
                            (ds_stakeholders, ds_stakeholders_name, "Benchmarks - Stakeholders")
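Note on the renames above: the cryptic short names ("FG", "PG", "P_GIS") become self-describing ("interfaces_graph", "processors_graph", "processors_geolayer"), and the solver's edges dataset joins the "flow_graph_solution_*" family, with a "dyn_" prefix for dynamic-scenario runs. Below is an illustrative sketch of that naming scheme; this helper does not exist in the repo, but every dataset name is taken from the hunks above.

```python
# Illustrative only: restates the dataset-name scheme of export_solver_data()
# after the rename. The helper itself is hypothetical.
def solver_dataset_names(dynamic_scenario: bool) -> dict:
    prefix = "dyn_" if dynamic_scenario else ""
    return {
        "solution": f"{prefix}flow_graph_solution",
        "edges": f"{prefix}flow_graph_solution_edges",  # was *_edges_matrix
        "indicators": f"{prefix}flow_graph_solution_indicators",
        "global_indicators": f"{prefix}flow_graph_global_indicators",
        "benchmarks": f"{prefix}flow_graph_solution_benchmarks",
        "stakeholders": "benchmarks_and_stakeholders",  # same in both branches
    }

assert solver_dataset_names(False)["edges"] == "flow_graph_solution_edges"
assert solver_dataset_names(True)["edges"] == "dyn_flow_graph_solution_edges"
```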
diff --git a/requirements.txt b/requirements.txt
index 201d723..43e5e75 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,10 +10,12 @@ gunicorn==19.9.0
 Cython==0.29.7
 # WINDOWS: REMOVE and USE "conda install -c conda-forge python-blosc" (install "gitforwindows", then open terminal using "Run as Administrator")
 blosc>=1.8.1
-brightway2==2.3
+# Commented because no LCA dataset was made available during the lifetime of the project, found no way to integrate with this powerful LCA package
+# brightway2==2.3
 matplotlib>=3.0.3
 #psycopg2==2.7.3.2  # Removed because it requires having PostgreSQL installed. It is explicitly in the Dockerfile
 webdavclient==1.0.8
+# May be troublesome in generating Windows executable
 owlready2==0.23
 celery>=4.3.0
 pykml==0.1.3
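Note on the requirements.txt hunk: brightway2 is commented out and owlready2 is flagged as potentially troublesome when freezing a Windows executable. A hedged sketch of one way to keep such an optional dependency from breaking a build that ships without it; the flag and function names are hypothetical, not part of the project.

```python
# Hypothetical guard: import the optional dependency lazily so the code runs
# (and a frozen Windows build works) even when the package is absent.
try:
    import brightway2  # optional LCA package, commented out in requirements.txt
    HAVE_BRIGHTWAY2 = True
except ImportError:
    HAVE_BRIGHTWAY2 = False

def lca_available() -> bool:
    """Feature test callers can use instead of importing brightway2 directly."""
    return HAVE_BRIGHTWAY2
```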