Skip to content

Commit

Permalink
2.1.4
Browse files Browse the repository at this point in the history
  • Loading branch information
mbaudis committed Jan 31, 2025
1 parent c7ea2c0 commit f6ef6dd
Show file tree
Hide file tree
Showing 92 changed files with 618 additions and 1,246 deletions.
4 changes: 1 addition & 3 deletions beaconplusWeb/env/staging
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
SERVER=https://staging.beaconplus.org/
NEXT_PUBLIC_API_PATH=/
NEXT_PUBLIC_PREFETCH_API_PATH=$SERVER
NEXT_PUBLIC_USE_PROXY=false
NEXT_PUBLIC_SITE_URL=$SERVER
NEXT_PUBLIC_SITE_URL=https://beaconplus.progenetix.org/
5 changes: 4 additions & 1 deletion beaconplusWeb/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion beaconplusWeb/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
"lodash": "^4.17.21",
"match-sorter": "^6.2.0",
"materialize-css": "^1.0.0-rc.2",
"next": "^12.1.0",
"next": "^12.3.4",
"prop-types": "^15.7.2",
"react": "^17.0.2",
"react-chartjs-2": "^5.2.0",
Expand Down
5 changes: 2 additions & 3 deletions bycon/byconServiceLibs/bycon_bundler.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def read_pgx_file(self, filepath):
if line.startswith("#"):
h_lines.append(line)

d_lines, fieldnames = read_tsv_to_dictlist(self.filepath, max_count=0)
d_lines, fieldnames = ByconTSVreader().file_to_dictlist(self.filepath, max_count=0)
self.header = h_lines
self.data = d_lines
self.fieldnames = fieldnames
Expand All @@ -104,7 +104,7 @@ def read_probedata_file(self, filepath):
self.filepath = filepath
self.probedata = []

p_lines, fieldnames = read_tsv_to_dictlist(self.filepath, max_count=0)
p_lines, fieldnames = ByconTSVreader().file_to_dictlist(self.filepath, max_count=0)

p_o = {
"probe_id": False,
Expand Down Expand Up @@ -404,7 +404,6 @@ def __flatten_keyed_bundle(self):
#--------------------------------------------------------------------------#

def __analysisBundleCreateIsets(self, label=""):

# self.dataset_ids = list(set([cs.get("dataset_id", "NA") for cs in self.bundle["analyses"]]))
GB = GenomeBins()
for ds_id in self.datasets_results.keys():
Expand Down
132 changes: 87 additions & 45 deletions bycon/byconServiceLibs/datatable_utils.py
Original file line number Diff line number Diff line change
@@ -1,52 +1,111 @@
import csv, re, requests
# from attrdictionary import AttrDict
import csv, re, requests, sys
from os import path

# bycon
from bycon import RefactoredValues, prdbug, prdlhead, prjsonnice, BYC, BYC_PARS, ENV
from bycon import (
BYC,
BYC_PARS,
RefactoredValues,
prdbug,
prdlhead,
prjsonnice
)

services_lib_path = path.join(path.dirname(path.abspath(__file__)))
sys.path.append(services_lib_path)
from file_utils import ExportFile


################################################################################

def export_datatable_download(flattened_data):
dt_m = BYC["datatable_mappings"]
r_t = BYC.get("response_entity_id", "___none___")
if not r_t in dt_m["$defs"]:
return
sel_pars = BYC_PARS.get("delivery_keys", [])
io_params = dt_m["$defs"][ r_t ]["parameters"]
if len(sel_pars) > 0:
io_params = { k: v for k, v in io_params.items() if k in sel_pars }
prdlhead(f'{r_t}.tsv')
header = create_table_header(io_params)
print("\t".join( header ))

for pgxdoc in flattened_data:
line = [ ]
for par, par_defs in io_params.items():
class ByconDatatableExporter:
    def __init__(self, file_type=None):
        """Prepare TSV export parameters for the current response entity.

        Looks up the entity's column definitions in the datatable
        mappings (`BYC["datatable_mappings"]["$defs"]`) and optionally
        restricts them to the parameters requested through the
        `delivery_keys` query parameter.
        """
        # NOTE(review): file_type is currently unused — presumably
        # reserved for future non-TSV export formats; confirm.
        self.datatable_mappings = BYC.get("datatable_mappings", {"$defs": {}})
        self.entity = BYC.get("response_entity_id", "___none___")
        if not self.entity in self.datatable_mappings["$defs"]:
            # TODO: proper error handling
            # (a bare return leaves the instance without io_params /
            # file_name; later method calls would raise AttributeError)
            return

        self.file_name = f'{self.entity}.tsv'

        sel_pars = BYC_PARS.get("delivery_keys", [])
        io_params = self.datatable_mappings["$defs"][self.entity]["parameters"]
        if len(sel_pars) > 0:
            # keep only the explicitly requested columns
            io_params = { k: v for k, v in io_params.items() if k in sel_pars }

        self.io_params = io_params


# -------------------------------------------------------------------------#
# ----------------------------- public ------------------------------------#
# -------------------------------------------------------------------------#

def stream_datatable(self, flattened_data):
prdlhead(self.file_name)
print(f'{self.__create_table_header()}\n')
for pgxdoc in flattened_data:
print(f'{self.__table_line_from_pgxdoc(pgxdoc)}\n')
exit()


# -------------------------------------------------------------------------#

def export_datatable(self, flattened_data):
if not (table_file := ExportFile().check_outputfile_path()):
return

t_f = open(table_file, "w")
t_f.write(f'{self.__create_table_header()}\n')
for pgxdoc in flattened_data:
t_f.write(f'{self.__table_line_from_pgxdoc(pgxdoc)}\n')
t_f.close()
exit()


# -------------------------------------------------------------------------#
# ---------------------------- private ------------------------------------#
# -------------------------------------------------------------------------#

def __table_line_from_pgxdoc(self, pgxdoc):
line = []
for par, par_defs in self.io_params.items():
parameter_type = par_defs.get("type", "string")
db_key = par_defs.get("db_key", "___undefined___")
v = get_nested_value(pgxdoc, db_key)
# TODO !!! This does not handle the object exports properly !!!
if isinstance(v, list):
line.append("&&".join(map(str, (v))))
else:
line.append(str(v))
print("\t".join( line ))
line.append(RefactoredValues(par_defs).strVal(v))
return "\t".join( line )

exit()
# -------------------------------------------------------------------------#

def __create_table_header(self):
"""
"""
header_labs = [ ]
for par, par_defs in self.io_params.items():
pres = par_defs.get("prefix_split", {})
if len(pres.keys()) < 1:
header_labs.append( par )
continue
for pre in pres.keys():
header_labs.append( par+"_id"+"___"+pre )
header_labs.append( par+"_label"+"___"+pre )

return "\t".join(header_labs)


################################################################################
################################################################################
################################################################################

def import_datatable_dict_line(parent, fieldnames, lineobj, primary_scope="biosample"):
dt_m = BYC["datatable_mappings"]
if not primary_scope in dt_m["$defs"]:
return
io_params = dt_m["$defs"][ primary_scope ]["parameters"]
def_params = create_table_header(io_params)
for f_n in fieldnames:
if "#"in f_n:
continue
if f_n not in def_params:
if f_n not in io_params.keys():
continue
if not (par_defs := io_params.get(f_n, {})):
continue
Expand Down Expand Up @@ -81,23 +140,6 @@ def import_datatable_dict_line(parent, fieldnames, lineobj, primary_scope="biosa

return parent

################################################################################

def create_table_header(io_params):
    """Assemble the list of column labels for a tabular export.

    A parameter without a `prefix_split` definition contributes its own
    name as a single column; one with prefixes expands into paired
    `<par>_id___<prefix>` / `<par>_label___<prefix>` columns.

    Returns:
        list of column label strings, in mapping order.
    """
    labels = []
    for p_name, p_defs in io_params.items():
        prefixes = p_defs.get("prefix_split", {})
        if not prefixes:
            labels.append(p_name)
        else:
            for prefix in prefixes:
                labels.append(f'{p_name}_id___{prefix}')
                labels.append(f'{p_name}_label___{prefix}')
    return labels


################################################################################

Expand Down
24 changes: 21 additions & 3 deletions bycon/byconServiceLibs/export_file_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@
from bycon import (
BYC,
BYC_PARS,
DB_MONGOHOST,
ByconVariant,
DB_MONGOHOST,
ENV,
prdbug,
return_paginated_list,
select_this_server,
Expand All @@ -17,8 +18,7 @@
services_lib_path = path.join( path.dirname( path.abspath(__file__) ) )
sys.path.append( services_lib_path )
from interval_utils import GenomeBins
from service_helpers import open_text_streaming, close_text_streaming
from datatable_utils import assign_nested_value, get_nested_value
from datatable_utils import get_nested_value

################################################################################

Expand Down Expand Up @@ -491,3 +491,21 @@ def export_vcf_download(datasets_results, ds_id):

close_text_streaming()


################################################################################

def open_text_streaming(filename="data.pgxseg"):
    """Print CGI headers that start a plain-text download response.

    Parameters:
        filename: name offered to the client; if it contains "browser"
            the content is shown inline instead of as an attachment.

    Skipped entirely when running from the shell (`"___shell___" in
    ENV`), where no HTTP headers are wanted.
    """
    # NOTE(review): header block reconstructed with all prints inside
    # the non-shell branch — headers only make sense in an HTTP context.
    if "___shell___" not in ENV:
        print('Content-Type: text/plain')
        if "browser" not in filename:
            # f-string replaces the dated .format() call; the file
            # already uses f-strings elsewhere
            print(f'Content-Disposition: attachment; filename="{filename}"')
        print('status: 200')
        print()


################################################################################

def close_text_streaming():
    """Terminate a streamed plain-text response.

    Emits a final blank line, logs the shutdown time through `prdbug`
    (debug channel) and exits the process — no caller code runs after
    this returns.
    """
    print()
    prdbug(f'... closing text streaming at {datetime.datetime.now().strftime("%H:%M:%S")}')
    exit()
Loading

0 comments on commit f6ef6dd

Please sign in to comment.