diff --git a/doc/conf.py b/doc/conf.py
index 422a4c89..4a0a2d13 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -11,11 +11,18 @@
 import os
 from unittest import mock
 
-MOCK_MODULES = ['numpy', 'scipy', 'tables',
-                'sapphire', 'sapphire.utils', 'sapphire.storage',
-                'sapphire.transformations', 'sapphire.analysis',
-                'sapphire.analysis.event_utils',
-                'sapphire.analysis.calibration']
+MOCK_MODULES = [
+    'numpy',
+    'scipy',
+    'tables',
+    'sapphire',
+    'sapphire.utils',
+    'sapphire.storage',
+    'sapphire.transformations',
+    'sapphire.analysis',
+    'sapphire.analysis.event_utils',
+    'sapphire.analysis.calibration',
+]
 sys.modules.update((name, mock.MagicMock()) for name in MOCK_MODULES)
 
 # The directory that contains settings_develop.py
@@ -23,13 +30,14 @@
 # Set up the Django settings/environment
 import django
+
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "publicdb.settings_develop")
 django.setup()
 
 # -- General configuration -----------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
@@ -42,7 +50,7 @@
 source_suffix = '.rst'
 
 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
 
 # The master toctree document.
 master_doc = 'index'
@@ -62,23 +70,23 @@
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 exclude_patterns = ['_build']
 
 # The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
 
 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
 
 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
@@ -86,13 +94,13 @@
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = 'sphinx'
 
 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
 
 # -- Options for HTML output ---------------------------------------------------
 
@@ -104,17 +112,17 @@
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}
 
 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []
 
 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None
 
 # A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
 
 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
@@ -132,69 +140,68 @@
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
 
 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
 
 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
 
 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
 
 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
 
 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
 
 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
 
 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
 
 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
 
 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
 
 # Output file base name for HTML help builder.
 htmlhelp_basename = 'publicdbdoc'
 
+
 def setup(app):
     app.add_css_file('hisparc_style.css')
 
+
 # -- Options for LaTeX output --------------------------------------------------
 
 latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-'papersize': 'a4',
-
-# The font size ('10pt', '11pt' or '12pt').
-'pointsize': '11pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
+    # The paper size ('letterpaper' or 'a4paper').
+    'papersize': 'a4',
+    # The font size ('10pt', '11pt' or '12pt').
+    'pointsize': '11pt',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-    ('index', 'publicdb.tex', 'Public Database Documentation',
-     'Arne de Laat', 'manual'),
+    ('index', 'publicdb.tex', 'Public Database Documentation', 'Arne de Laat', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -203,32 +210,29 @@ def setup(app):
 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
 
 # If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
 
 # If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
 
 # Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
 
 # If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
 
 # -- Options for manual page output --------------------------------------------
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'publicdb', 'Public Database Documentation',
-     ['Arne de Laat'], 1)
-]
+man_pages = [('index', 'publicdb', 'Public Database Documentation', ['Arne de Laat'], 1)]
 
 # If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False
 
 # -- Options for Texinfo output ------------------------------------------------
 
@@ -237,19 +241,25 @@ def setup(app):
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    ('index', 'publicdb', 'Public Database Documentation',
-     'Arne de Laat', 'publicdb', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        'index',
+        'publicdb',
+        'Public Database Documentation',
+        'Arne de Laat',
+        'publicdb',
+        'One line description of project.',
+        'Miscellaneous',
+    ),
 ]
 
 # Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []
 
 # If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True
 
 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'
 
 # -- Options for Epub output ---------------------------------------------------
 
@@ -262,37 +272,37 @@ def setup(app):
 # The language of the text. It defaults to the language option
 # or en if the language is not set.
-#epub_language = ''
+# epub_language = ''
 
 # The scheme of the identifier. Typical schemes are ISBN or URL.
-#epub_scheme = ''
+# epub_scheme = ''
 
 # The unique identifier of the text. This can be a ISBN number
 # or the project homepage.
-#epub_identifier = ''
+# epub_identifier = ''
 
 # A unique identification for the text.
-#epub_uid = ''
+# epub_uid = ''
 
 # A tuple containing the cover image and cover page html template filenames.
-#epub_cover = ()
+# epub_cover = ()
 
 # HTML files that should be inserted before the pages created by sphinx.
 # The format is a list of tuples containing the path and title.
-#epub_pre_files = []
+# epub_pre_files = []
 
 # HTML files shat should be inserted after the pages created by sphinx.
 # The format is a list of tuples containing the path and title.
-#epub_post_files = []
+# epub_post_files = []
 
 # A list of files that should not be packed into the epub file.
-#epub_exclude_files = []
+# epub_exclude_files = []
 
 # The depth of the table of contents in toc.ncx.
-#epub_tocdepth = 3
+# epub_tocdepth = 3
 
 # Allow duplicate toc entries.
-#epub_tocdup = True
+# epub_tocdup = True
 
 # -- User-defined options ------------------------------------------------------
 
diff --git a/provisioning/roles/datastore/templates/datastore-config-server.py b/provisioning/roles/datastore/templates/datastore-config-server.py
index efc8f794..cbb1eea4 100644
--- a/provisioning/roles/datastore/templates/datastore-config-server.py
+++ b/provisioning/roles/datastore/templates/datastore-config-server.py
@@ -21,6 +21,7 @@
 CFG_URL = '{{ datastore_config_url }}'
 RELOAD_PATH = '/tmp/uwsgi-reload.me'
 
+
 def reload_datastore():
     """Load datastore config and reload datastore, if necessary"""
 
@@ -55,8 +56,7 @@ class RequestHandler(SimpleXMLRPCRequestHandler):
         rpc_paths = ('/RPC2',)
 
     # Create server
-    server = SimpleXMLRPCServer(('{{ datastore_host }}', {{ datastore_port }}),
-                                requestHandler=RequestHandler)
+    server = SimpleXMLRPCServer(('{{ datastore_host }}', {{datastore_port}}), requestHandler=RequestHandler)
 
     server.register_introspection_functions()
     server.register_function(reload_datastore)
diff --git a/provisioning/roles/datastore/templates/writer_app.py b/provisioning/roles/datastore/templates/writer_app.py
index ee8d7d45..4f2e37f6 100644
--- a/provisioning/roles/datastore/templates/writer_app.py
+++ b/provisioning/roles/datastore/templates/writer_app.py
@@ -6,5 +6,5 @@
 
 from writer import writer_app
 
-configfile = ('{{ datastore_path }}config.ini')
+configfile = '{{ datastore_path }}config.ini'
 writer_app.writer(configfile)
diff --git a/provisioning/roles/publicdb/templates/settings.py b/provisioning/roles/publicdb/templates/settings.py
index 6523f326..33765d4c 100644
--- a/provisioning/roles/publicdb/templates/settings.py
+++ b/provisioning/roles/publicdb/templates/settings.py
@@ -130,7 +130,6 @@
     'django.contrib.messages',
     'django.contrib.staticfiles',
     'django.contrib.admin',
-
    'publicdb.inforecords',
    'publicdb.histograms',
    'publicdb.coincidences',
@@ -153,15 +152,13 @@
        },
    },
    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse'
-        },
+        'require_debug_false': {'()': 'django.utils.log.RequireDebugFalse'},
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
+            'class': 'django.utils.log.AdminEmailHandler',
        },
        'null_handler': {
            'class': 'logging.NullHandler',
diff --git a/publicdb/analysissessions/models.py b/publicdb/analysissessions/models.py
index f3bb174c..04031952 100644
--- a/publicdb/analysissessions/models.py
+++ b/publicdb/analysissessions/models.py
@@ -114,12 +114,13 @@ def create_session(self):
             ends=ends,
             pin=str(self.pin),
             slug=slugify(self.sid),
-            title=self.sid)
+            title=self.sid,
+        )
         session.save()
         date = self.start_date
         search_length = datetime.timedelta(weeks=3)
         enddate = min([self.start_date + search_length, datetime.date.today()])
-        while (self.events_created < self.events_to_create and date < enddate):
+        while self.events_created < self.events_to_create and date < enddate:
             self.events_created += self.find_coincidence(date, session)
             date += datetime.timedelta(days=1)
         if self.events_created <= 0:
@@ -139,8 +140,9 @@ def find_coincidence(self, date, session):
 
         Then return the number of found coincidences.
""" - stations = Station.objects.filter(cluster=self.cluster, - pcs__is_test=False).distinct().values_list('number', flat=True) + stations = ( + Station.objects.filter(cluster=self.cluster, pcs__is_test=False).distinct().values_list('number', flat=True) + ) path = get_esd_data_path(date) if not os.path.isfile(path): @@ -174,19 +176,23 @@ def save_coincidence(self, esd_coincidence, session): event_datetime = gps_to_datetime(trace_timestamp) event_timestamps.append((event_datetime, trace_nanoseconds)) - event = Event(date=event_datetime.date(), - time=event_datetime.time(), - nanoseconds=trace_nanoseconds, - station=station, - pulseheights=event['pulseheights'].tolist(), - integrals=event['integrals'].tolist(), - traces=traces) + event = Event( + date=event_datetime.date(), + time=event_datetime.time(), + nanoseconds=trace_nanoseconds, + station=station, + pulseheights=event['pulseheights'].tolist(), + integrals=event['integrals'].tolist(), + traces=traces, + ) event_objects.append(event) first_timestamp = min(event_timestamps) - coincidence = Coincidence(date=first_timestamp[0].date(), - time=first_timestamp[0].time(), - nanoseconds=first_timestamp[1]) + coincidence = Coincidence( + date=first_timestamp[0].date(), + time=first_timestamp[0].time(), + nanoseconds=first_timestamp[1], + ) coincidence.save() for event in event_objects: diff --git a/publicdb/analysissessions/urls.py b/publicdb/analysissessions/urls.py index 0ffb0cc2..dd83e8a5 100644 --- a/publicdb/analysissessions/urls.py +++ b/publicdb/analysissessions/urls.py @@ -5,11 +5,9 @@ app_name = 'sessions' urlpatterns = [ path('/data/', views.data_display, name="data_display"), - path('request/', views.request_form, name="request"), path('request/validate/', views.validate_request_form, name="validate"), path('request//', views.confirm_request, name="confirm"), - path('get_coincidence/', views.get_coincidence, name="get_coincidence"), path('result/', views.result, name="result"), ] diff --git a/publicdb/analysissessions/views.py b/publicdb/analysissessions/views.py index 5f9848c2..21cfec97 100644 --- a/publicdb/analysissessions/views.py +++ b/publicdb/analysissessions/views.py @@ -73,25 +73,29 @@ def get_events(analyzed_coincidence): events = [] for event in analyzed_coincidence.coincidence.events.all(): try: - config = (Configuration.objects - .filter(summary__station=event.station, summary__date__lte=event.date) - .exclude(gps_latitude=0, gps_longitude=0).latest()) + config = ( + Configuration.objects.filter(summary__station=event.station, summary__date__lte=event.date) + .exclude(gps_latitude=0, gps_longitude=0) + .latest() + ) except Configuration.DoesNotExist: continue timestamp = datetime_to_gps(datetime.combine(event.date, event.time)) - event_dict = dict(timestamp=timestamp, - nanoseconds=event.nanoseconds, - number=event.station.number, - latitude=config.gps_latitude, - longitude=config.gps_longitude, - altitude=config.gps_altitude, - status='on', - detectors=len(event.traces), - traces=event.traces, - pulseheights=event.pulseheights, - integrals=event.integrals, - mips=[ph / 200. 
if ph > 0 else ph for ph in event.pulseheights]) + event_dict = dict( + timestamp=timestamp, + nanoseconds=event.nanoseconds, + number=event.station.number, + latitude=config.gps_latitude, + longitude=config.gps_longitude, + altitude=config.gps_altitude, + status='on', + detectors=len(event.traces), + traces=event.traces, + pulseheights=event.pulseheights, + integrals=event.integrals, + mips=[ph / 200.0 if ph > 0 else ph for ph in event.pulseheights], + ) events.append(event_dict) return events @@ -99,10 +103,12 @@ def get_events(analyzed_coincidence): def data_json(coincidence, events): """Construct json with data for jSparc to display""" timestamp = datetime_to_gps(datetime.combine(coincidence.coincidence.date, coincidence.coincidence.time)) - data = dict(pk=coincidence.pk, - timestamp=timestamp, - nanoseconds=coincidence.coincidence.nanoseconds, - events=events) + data = dict( + pk=coincidence.pk, + timestamp=timestamp, + nanoseconds=coincidence.coincidence.nanoseconds, + events=events, + ) response = HttpResponse(json.dumps(data), content_type='application/json') response['Access-Control-Allow-Origin'] = '*' return response @@ -130,9 +136,15 @@ def top_lijst(slug): avg_error = np.average(error) wgh_error = avg_error / num_events min_error = min(error) - scores.append({'name': student.name, 'avg_error': avg_error, - 'wgh_error': wgh_error, 'min_error': min_error, - 'num_events': num_events}) + scores.append( + { + 'name': student.name, + 'avg_error': avg_error, + 'wgh_error': wgh_error, + 'min_error': min_error, + 'num_events': num_events, + } + ) return sorted(scores, key=operator.itemgetter('wgh_error')) @@ -205,20 +217,25 @@ def data_display(request, slug): star_map = None # create_star_map(slug, coincidences) scores = top_lijst(slug) - return render(request, 'analysissessions/results.html', - {'energy_histogram': energy_histogram, - 'core_map': core_map, - 'star_map': star_map, - 'scores': scores, - 'slug': slug, - 'session': session}) + return render( + request, + 'analysissessions/results.html', + { + 'energy_histogram': energy_histogram, + 'core_map': core_map, + 'star_map': star_map, + 'scores': scores, + 'slug': slug, + 'session': session, + }, + ) def create_energy_histogram(slug, coincidences): """Create an energy histogram""" energies = [x.log_energy for x in coincidences] - good_energies = [x.log_energy for x in coincidences.filter(error_estimate__lte=100.)] + good_energies = [x.log_energy for x in coincidences.filter(error_estimate__lte=100.0)] v1, bins = np.histogram(energies, bins=np.arange(14, 23, 1)) v2, bins = np.histogram(good_energies, bins=np.arange(14, 23, 1)) @@ -278,17 +295,19 @@ def validate_request_form(request): data = {} data.update(form.cleaned_data) - new_request = SessionRequest(first_name=data['first_name'], - sur_name=data['sur_name'], - email=data['email'], - school=data['school'], - cluster=data['cluster'], - start_date=data['start_date'], - mail_send=False, - session_created=False, - session_pending=True, - events_to_create=data['number_of_events'], - events_created=0) + new_request = SessionRequest( + first_name=data['first_name'], + sur_name=data['sur_name'], + email=data['email'], + school=data['school'], + cluster=data['cluster'], + start_date=data['start_date'], + mail_send=False, + session_created=False, + session_pending=True, + events_to_create=data['number_of_events'], + events_created=0, + ) new_request.generate_url() new_request.save() @@ -304,4 +323,8 @@ def confirm_request(request, url): sessionrequest.pin = randint(1000, 9999) 
     sessionrequest.session_confirmed = True
     sessionrequest.save()
-    return render(request, 'analysissessions/confirm.html', {'id': sessionrequest.sid, 'pin': sessionrequest.pin})
+    return render(
+        request,
+        'analysissessions/confirm.html',
+        {'id': sessionrequest.sid, 'pin': sessionrequest.pin},
+    )
diff --git a/publicdb/api/datastore.py b/publicdb/api/datastore.py
index e4f80205..7715be35 100644
--- a/publicdb/api/datastore.py
+++ b/publicdb/api/datastore.py
@@ -37,12 +37,12 @@ def retrieve_traces(node, timestamp, nanoseconds, raw=False):
     event = node.events.read_where(f'(timestamp == {timestamp}) & (nanoseconds == {nanoseconds})')[0]
     traces_idx = event['traces']
     baselines = event['baseline']
-    traces_str = [zlib.decompress(node.blobs[trace_idx]).split(',')
-                  for trace_idx in traces_idx if trace_idx != -1]
+    traces_str = [zlib.decompress(node.blobs[trace_idx]).split(',') for trace_idx in traces_idx if trace_idx != -1]
 
-    traces = [[int(value) if baseline == -999 or raw else int(value) - baseline
-               for value in trace_str if value != '']
-              for baseline, trace_str in zip(baselines, traces_str)]
+    traces = [
+        [int(value) if baseline == -999 or raw else int(value) - baseline for value in trace_str if value != '']
+        for baseline, trace_str in zip(baselines, traces_str)
+    ]
 
     return traces
diff --git a/publicdb/api/urls.py b/publicdb/api/urls.py
index a05b3af4..13699de9 100644
--- a/publicdb/api/urls.py
+++ b/publicdb/api/urls.py
@@ -10,54 +10,120 @@
 app_name = 'api'
 urlpatterns = [
     path('', views.man, name="man"),
-
     path('network/status/', views.network_status),
-
     path('stations/', views.stations, name="stations"),
     path('subclusters/', views.subclusters, name="subclusters"),
     path('clusters/', views.clusters, name="clusters"),
     path('countries/', views.countries, name="countries"),
-
     path('subclusters//', views.stations, name="stations"),
     path('clusters//', views.subclusters, name="subclusters"),
     path('countries//', views.clusters, name="clusters"),
-
     path('stations/data/', views.stations_with_data, {'type': 'events'}, name="data_stations"),
     path('stations/data//', views.stations_with_data, {'type': 'events'}, name="data_stations"),
-    path('stations/data///', views.stations_with_data, {'type': 'events'}, name="data_stations"),
+    path(
+        'stations/data///',
+        views.stations_with_data,
+        {'type': 'events'},
+        name="data_stations",
+    ),
     path('stations/data//', views.stations_with_data, {'type': 'events'}, name="data_stations"),
     path('stations/weather/', views.stations_with_data, {'type': 'weather'}, name="weather_stations"),
-    path('stations/weather//', views.stations_with_data, {'type': 'weather'}, name="weather_stations"),
-    path('stations/weather///', views.stations_with_data, {'type': 'weather'}, name="weather_stations"),
-    path('stations/weather//', views.stations_with_data, {'type': 'weather'}, name="weather_stations"),
+    path(
+        'stations/weather//',
+        views.stations_with_data,
+        {'type': 'weather'},
+        name="weather_stations",
+    ),
+    path(
+        'stations/weather///',
+        views.stations_with_data,
+        {'type': 'weather'},
+        name="weather_stations",
+    ),
+    path(
+        'stations/weather//',
+        views.stations_with_data,
+        {'type': 'weather'},
+        name="weather_stations",
+    ),
     path('stations/singles/', views.stations_with_data, {'type': 'singles'}, name="singles_stations"),
-    path('stations/singles//', views.stations_with_data, {'type': 'singles'}, name="singles_stations"),
-    path('stations/singles///', views.stations_with_data, {'type': 'singles'}, name="singles_stations"),
-    path('stations/singles//', views.stations_with_data, {'type': 'singles'}, name="singles_stations"),
-
+    path(
+        'stations/singles//',
+        views.stations_with_data,
+        {'type': 'singles'},
+        name="singles_stations",
+    ),
+    path(
+        'stations/singles///',
+        views.stations_with_data,
+        {'type': 'singles'},
+        name="singles_stations",
+    ),
+    path(
+        'stations/singles//',
+        views.stations_with_data,
+        {'type': 'singles'},
+        name="singles_stations",
+    ),
     path('station//', views.station, name="station"),
     path('station///', views.station, name="station"),
-
     path('station//data/', views.has_data, {'type': 'events'}, name="has_data"),
     path('station//data//', views.has_data, {'type': 'events'}, name="has_data"),
-    path('station//data///', views.has_data, {'type': 'events'}, name="has_data"),
+    path(
+        'station//data///',
+        views.has_data,
+        {'type': 'events'},
+        name="has_data",
+    ),
     path('station//data//', views.has_data, {'type': 'events'}, name="has_data"),
     path('station//weather/', views.has_data, {'type': 'weather'}, name="has_weather"),
-    path('station//weather//', views.has_data, {'type': 'weather'}, name="has_weather"),
-    path('station//weather///', views.has_data, {'type': 'weather'}, name="has_weather"),
-    path('station//weather//', views.has_data, {'type': 'weather'}, name="has_weather"),
+    path(
+        'station//weather//',
+        views.has_data,
+        {'type': 'weather'},
+        name="has_weather",
+    ),
+    path(
+        'station//weather///',
+        views.has_data,
+        {'type': 'weather'},
+        name="has_weather",
+    ),
+    path(
+        'station//weather//',
+        views.has_data,
+        {'type': 'weather'},
+        name="has_weather",
+    ),
     path('station//singles/', views.has_data, {'type': 'singles'}, name="has_singles"),
-    path('station//singles//', views.has_data, {'type': 'singles'}, name="has_singles"),
-    path('station//singles///', views.has_data, {'type': 'singles'}, name="has_singles"),
-    path('station//singles//', views.has_data, {'type': 'singles'}, name="has_singles"),
+    path(
+        'station//singles//',
+        views.has_data,
+        {'type': 'singles'},
+        name="has_singles",
+    ),
+    path(
+        'station//singles///',
+        views.has_data,
+        {'type': 'singles'},
+        name="has_singles",
+    ),
+    path(
+        'station//singles//',
+        views.has_data,
+        {'type': 'singles'},
+        name="has_singles",
+    ),
     path('station//config/', views.config, name="config"),
     path('station//config//', views.config, name="config"),
-
     path('station//num_events/', views.num_events, name="num_events"),
     path('station//num_events//', views.num_events, name="num_events"),
-    path('station//num_events///', views.num_events, name="num_events"),
+    path(
+        'station//num_events///',
+        views.num_events,
+        name="num_events",
+    ),
     path('station//num_events//', views.num_events, name="num_events"),
     path('station//num_events///', views.num_events, name="num_events"),
-
     path('station//trace//', views.get_event_traces, name="event_traces"),
 ]
diff --git a/publicdb/api/views.py b/publicdb/api/views.py
index 38f1bbc8..eab8ee8e 100644
--- a/publicdb/api/views.py
+++ b/publicdb/api/views.py
@@ -15,8 +15,7 @@
 
 def json_dict(result):
     """Create a json HTTPResponse"""
-    response = HttpResponse(json.dumps(result, sort_keys=True),
-                            content_type='application/json')
+    response = HttpResponse(json.dumps(result, sort_keys=True), content_type='application/json')
     response['Access-Control-Allow-Origin'] = '*'
     return response
 
@@ -59,8 +58,7 @@ def network_status(request):
     for station in Station.objects.exclude(pcs__type__slug='admin'):
         status = station_status.get_status(station.number)
-        stations.append({'number': station.number,
-                         'status': status})
+        stations.append({'number': station.number, 'status': status})
 
     return json_dict(stations)
 
@@ -97,32 +95,50 @@ def station(request, station_number, year=None, month=None, date=None):
 
     is_active = Pc.objects.filter(station=station, is_active=True).exists()
 
-    scintillators = [{'radius': layout.detector_1_radius,
-                      'alpha': layout.detector_1_alpha,
-                      'height': layout.detector_1_height,
-                      'beta': layout.detector_1_beta}]
-    scintillators.append({'radius': layout.detector_2_radius,
-                          'alpha': layout.detector_2_alpha,
-                          'height': layout.detector_2_height,
-                          'beta': layout.detector_2_beta})
+    scintillators = [
+        {
+            'radius': layout.detector_1_radius,
+            'alpha': layout.detector_1_alpha,
+            'height': layout.detector_1_height,
+            'beta': layout.detector_1_beta,
+        }
+    ]
+    scintillators.append(
+        {
+            'radius': layout.detector_2_radius,
+            'alpha': layout.detector_2_alpha,
+            'height': layout.detector_2_height,
+            'beta': layout.detector_2_beta,
+        }
+    )
 
     if station.number_of_detectors() == 4:
-        scintillators.append({'radius': layout.detector_3_radius,
-                              'alpha': layout.detector_3_alpha,
-                              'height': layout.detector_3_height,
-                              'beta': layout.detector_3_beta})
-        scintillators.append({'radius': layout.detector_4_radius,
-                              'alpha': layout.detector_4_alpha,
-                              'height': layout.detector_4_height,
-                              'beta': layout.detector_4_beta})
-
-    station_info = {'number': station.number,
-                    'name': station.name,
-                    'subcluster': station.cluster.name,
-                    'cluster': station.cluster.main_cluster(),
-                    'country': station.cluster.country.name,
-                    'active': is_active,
-                    'scintillators': scintillators}
+        scintillators.append(
+            {
+                'radius': layout.detector_3_radius,
+                'alpha': layout.detector_3_alpha,
+                'height': layout.detector_3_height,
+                'beta': layout.detector_3_beta,
+            }
+        )
+        scintillators.append(
+            {
+                'radius': layout.detector_4_radius,
+                'alpha': layout.detector_4_alpha,
+                'height': layout.detector_4_height,
+                'beta': layout.detector_4_beta,
+            }
+        )
+
+    station_info = {
+        'number': station.number,
+        'name': station.name,
+        'subcluster': station.cluster.name,
+        'cluster': station.cluster.main_cluster(),
+        'country': station.cluster.country.name,
+        'active': is_active,
+        'scintillators': scintillators,
+    }
 
     station_info.update(location)
 
     return json_dict(station_info)
diff --git a/publicdb/default/management/commands/createfakedata.py b/publicdb/default/management/commands/createfakedata.py
index 4d8e18cf..e52e4a36 100644
--- a/publicdb/default/management/commands/createfakedata.py
+++ b/publicdb/default/management/commands/createfakedata.py
@@ -18,8 +18,7 @@ def handle(*args, **options):
         with factory.Faker.override_default_locale('nl_NL'):
             # Inforecords
             countries = [
-                inforecords_factories.CountryFactory(number=country_number)
-                for country_number in range(0, 20001, 10000)
+                inforecords_factories.CountryFactory(number=country_number) for country_number in range(0, 20001, 10000)
             ]
             clusters = [
                 inforecords_factories.ClusterFactory(country=country, number=cluster_number + country.number)
@@ -27,7 +26,9 @@ def handle(*args, **options):
                 for cluster_number in range(0, 3001, 1000)
             ]
             subclusters = [
-                inforecords_factories.ClusterFactory(country=cluster.country, parent=cluster, number=cluster_number + cluster.number)
+                inforecords_factories.ClusterFactory(
+                    country=cluster.country, parent=cluster, number=cluster_number + cluster.number
+                )
                 for cluster in clusters
                 for cluster_number in range(100, 201, 100)
             ]
@@ -47,10 +48,7 @@ def handle(*args, **options):
             dates = [date(2017, 1, 1), date(2017, 1, 2), date(2017, 2, 10), date(2018, 4, 1)]
 
             # Histograms and datasets
-            network_summaries = [
-                histograms_factories.NetworkSummaryFactory(date=summary_date)
-                for summary_date in dates
-            ]
+            network_summaries = [histograms_factories.NetworkSummaryFactory(date=summary_date) for summary_date in dates]
 
             for network_summary in network_summaries:
                 histograms_factories.CoincidencetimeHistogramFactory(network_summary=network_summary)
diff --git a/publicdb/histograms/admin.py b/publicdb/histograms/admin.py
index cec687b3..661e1458 100644
--- a/publicdb/histograms/admin.py
+++ b/publicdb/histograms/admin.py
@@ -5,22 +5,20 @@
 
 @admin.register(models.GeneratorState)
 class GeneratorStateAdmin(admin.ModelAdmin):
-    list_display = ('check_last_run', 'check_is_running', 'update_last_run',
-                    'update_is_running')
+    list_display = ('check_last_run', 'check_is_running', 'update_last_run', 'update_is_running')
 
 
 @admin.register(models.NetworkHistogram)
 class NetworkHistogramAdmin(admin.ModelAdmin):
-    list_display = ('network_summary', 'type',)
-    list_filter = ('type', 'network_summary__date',)
+    list_display = ('network_summary', 'type')
+    list_filter = ('type', 'network_summary__date')
     raw_id_fields = ('network_summary',)
 
 
-@admin.register(models.DailyHistogram, models.MultiDailyHistogram,
-                models.DailyDataset, models.MultiDailyDataset)
+@admin.register(models.DailyHistogram, models.MultiDailyHistogram, models.DailyDataset, models.MultiDailyDataset)
 class DailyStationDataAdmin(admin.ModelAdmin):
-    list_display = ('summary', 'type',)
-    list_filter = ('type', 'summary__station__number',)
+    list_display = ('summary', 'type')
+    list_filter = ('type', 'summary__station__number')
     raw_id_fields = ('summary',)
 
@@ -51,78 +49,114 @@ class NetworkHistogramInline(admin.StackedInline):
 
 @admin.register(models.NetworkSummary)
 class NetworkSummaryAdmin(admin.ModelAdmin):
-    list_display = ('date', 'needs_update', 'needs_update_coincidences',
-                    'num_coincidences',)
+    list_display = (
+        'date',
+        'needs_update',
+        'needs_update_coincidences',
+        'num_coincidences',
+    )
     list_filter = ('needs_update', 'needs_update_coincidences', 'date')
     list_editable = ('needs_update', 'needs_update_coincidences')
     inlines = (NetworkHistogramInline,)
     list_per_page = 200
-    actions = ['unset_update_flag', 'unset_coincidences_flag',
-               'set_update_flag', 'set_coincidences_flag']
+    actions = ['unset_update_flag', 'unset_coincidences_flag', 'set_update_flag', 'set_coincidences_flag']
 
     def unset_update_flag(self, request, qs):
         qs.update(needs_update=False)
+
     unset_update_flag.short_description = "Unset needs_update"
 
     def unset_coincidences_flag(self, request, qs):
         qs.update(needs_update_coincidences=False)
-    unset_coincidences_flag.short_description = ("Unset "
-                                                 "needs_update_coincidences")
+
+    unset_coincidences_flag.short_description = "Unset " "needs_update_coincidences"
 
     def set_update_flag(self, request, qs):
         qs.update(needs_update=True)
+
     set_update_flag.short_description = "Set needs_update"
 
     def set_coincidences_flag(self, request, qs):
         """Only set flags if num coincidences is not null"""
-        (qs.filter(num_coincidences__isnull=False)
-         .update(needs_update_coincidences=True))
+        (qs.filter(num_coincidences__isnull=False).update(needs_update_coincidences=True))
+
     set_coincidences_flag.short_description = "Set needs_update_coincidences"
 
 
 @admin.register(models.Summary)
 class SummaryAdmin(admin.ModelAdmin):
-    list_display = ('station', 'date', 'num_events', 'num_config',
-                    'num_errors', 'num_weather', 'num_singles',
-                    'needs_update', 'needs_update_events',
-                    'needs_update_config', 'needs_update_errors',
-                    'needs_update_weather', 'needs_update_singles')
-    list_filter = ('station', 'needs_update', 'needs_update_events',
-                   'needs_update_weather', 'needs_update_singles',
-                   'needs_update_config', 'date')
-    list_editable = ('needs_update', 'needs_update_events',
-                     'needs_update_weather', 'needs_update_singles',
-                     'needs_update_config')
-    inlines = (DailyHistogramInline, MultiDailyHistogramInline,
-               DailyDatasetInline, MultiDailyDatasetInline)
+    list_display = (
+        'station',
+        'date',
+        'num_events',
+        'num_config',
+        'num_errors',
+        'num_weather',
+        'num_singles',
+        'needs_update',
+        'needs_update_events',
+        'needs_update_config',
+        'needs_update_errors',
+        'needs_update_weather',
+        'needs_update_singles',
+    )
+    list_filter = (
+        'station',
+        'needs_update',
+        'needs_update_events',
+        'needs_update_weather',
+        'needs_update_singles',
+        'needs_update_config',
+        'date',
+    )
+    list_editable = (
+        'needs_update',
+        'needs_update_events',
+        'needs_update_weather',
+        'needs_update_singles',
+        'needs_update_config',
+    )
+    inlines = (DailyHistogramInline, MultiDailyHistogramInline, DailyDatasetInline, MultiDailyDatasetInline)
     list_per_page = 200
-    actions = ['unset_update_flag', 'unset_events_flag', 'unset_config_flag',
-               'set_update_flag', 'set_events_flag', 'set_config_flag']
+    actions = [
+        'unset_update_flag',
+        'unset_events_flag',
+        'unset_config_flag',
+        'set_update_flag',
+        'set_events_flag',
+        'set_config_flag',
+    ]
 
     def unset_update_flag(self, request, qs):
         qs.update(needs_update=False)
+
     unset_update_flag.short_description = "Unset needs_update"
 
     def unset_events_flag(self, request, qs):
         qs.update(needs_update_events=False)
+
     unset_events_flag.short_description = "Unset needs_update_events"
 
     def unset_config_flag(self, request, qs):
         qs.update(needs_update_config=False)
+
     unset_config_flag.short_description = "Unset needs_update_config"
 
     def set_update_flag(self, request, qs):
         qs.update(needs_update=True)
+
     set_update_flag.short_description = "Set needs_update"
 
     def set_events_flag(self, request, qs):
         """Only set flags if num events is not null"""
         qs.filter(num_events__isnull=False).update(needs_update_events=True)
+
     set_events_flag.short_description = "Set needs_update_events"
 
     def set_config_flag(self, request, qs):
         """Only set flags if num config is not null"""
         qs.filter(num_config__isnull=False).update(needs_update_config=True)
+
     set_config_flag.short_description = "Set needs_update_config"
 
@@ -143,8 +177,7 @@ class DetectorTimingOffsetAdmin(admin.ModelAdmin):
 @admin.register(models.StationTimingOffset)
 class StationTimingOffsetAdmin(admin.ModelAdmin):
     list_display = ('ref_summary', 'summary', 'offset', 'error')
-    list_filter = ('ref_summary__station__number', 'summary__station__number',
-                   'ref_summary__date')
+    list_filter = ('ref_summary__station__number', 'summary__station__number', 'ref_summary__date')
     raw_id_fields = ('ref_summary', 'summary')
diff --git a/publicdb/histograms/checks.py b/publicdb/histograms/checks.py
index b4d2674d..625da5cd 100644
--- a/publicdb/histograms/checks.py
+++ b/publicdb/histograms/checks.py
@@ -72,9 +72,7 @@ def process_possible_stations_for_date(date, station_list):
     """
     logger.info('Now processing %s', date)
-    unique_table_list = {table_name
-                         for table_list in station_list.values()
-                         for table_name in table_list.keys()}
+    unique_table_list = {table_name for table_list in station_list.values() for table_name in table_list.keys()}
     for table_name in unique_table_list:
         process_possible_tables_for_network(date, table_name)
     for station, table_list in station_list.items():
@@ -120,10 +118,12 @@ def check_table_and_update_flags(table_name, num_events, summary):
         update_flag_attr = f'needs_update_{table_name}'
 
         if getattr(summary, number_of_events_attr) != num_events:
-            logger.info("New data (%s) on %s for station %d",
-                        table_name,
-                        summary.date.strftime("%a %b %d %Y"),
-                        summary.station.number)
+            logger.info(
+                "New data (%s) on %s for station %d",
+                table_name,
+                summary.date.strftime("%a %b %d %Y"),
+                summary.station.number,
+            )
             # only record number of events for *some* tables at this time
             if table_name in RECORD_EARLY_NUM_EVENTS:
                 setattr(summary, number_of_events_attr, num_events)
diff --git a/publicdb/histograms/datastore.py b/publicdb/histograms/datastore.py
index 0609c7c5..dbe4d7d5 100644
--- a/publicdb/histograms/datastore.py
+++ b/publicdb/histograms/datastore.py
@@ -43,10 +43,7 @@ def get_updated_files(rootdir, last_check_time):
                 date = datetime.datetime.strptime(file, '%Y_%m_%d.h5').date()
             except ValueError:
                 continue
-            if (
-                date != datetime.datetime.utcnow().date()
-                and date != datetime.datetime.today()
-            ):
+            if date != datetime.datetime.utcnow().date() and date != datetime.datetime.today():
                 file_list.append((date, file_path))
 
     return file_list
diff --git a/publicdb/histograms/esd.py b/publicdb/histograms/esd.py
index f8acb1c4..e92b427f 100644
--- a/publicdb/histograms/esd.py
+++ b/publicdb/histograms/esd.py
@@ -49,10 +49,12 @@ def search_coincidences_and_store_in_esd(network_summary):
     date = network_summary.date
 
     # Get non-test stations with events on the specified date
-    stations = Station.objects.filter(summaries__date=date,
-                                      summaries__num_events__isnull=False,
-                                      summaries__needs_update=False,
-                                      pcs__is_test=False).distinct()
+    stations = Station.objects.filter(
+        summaries__date=date,
+        summaries__num_events__isnull=False,
+        summaries__needs_update=False,
+        pcs__is_test=False,
+    ).distinct()
     station_numbers = [station.number for station in stations]
     station_groups = [get_station_node_path(station) for station in stations]
 
@@ -105,8 +107,7 @@ def process_events_and_store_temporary_esd(summary):
         source_node = get_station_node(source_file, station)
         tmp_filename = create_temporary_file()
         with tables.open_file(tmp_filename, 'w') as tmp_file:
-            process = ProcessEventsFromSourceWithTriggerOffset(
-                source_file, tmp_file, source_node, '/', station.number)
+            process = ProcessEventsFromSourceWithTriggerOffset(source_file, tmp_file, source_node, '/', station.number)
             process.process_and_store_results()
             node_path = process.destination._v_pathname
     return tmp_filename, node_path
@@ -177,7 +178,8 @@ def reconstruct_events_and_store_temporary_esd(summary):
         tmp_filename = create_temporary_file()
         with tables.open_file(tmp_filename, 'w') as tmp_file:
             reconstruct = ReconstructESDEventsFromSource(
-                source_file, tmp_file, source_path, '/', station.number, progress=False)
+                source_file, tmp_file, source_path, '/', station.number, progress=False
+            )
             reconstruct.reconstruct_and_store()
             node_path = reconstruct.reconstructions._v_pathname
     return tmp_filename, node_path
diff --git a/publicdb/histograms/jobs.py b/publicdb/histograms/jobs.py
index ccb36747..e597e60c 100644
--- a/publicdb/histograms/jobs.py
+++ b/publicdb/histograms/jobs.py
@@ -219,7 +219,7 @@ def update_histograms():
 
 def perform_tasks_manager(model, needs_update_item, perform_certain_tasks):
-    """ Front office for doing tasks
+    """Front office for doing tasks
 
     Depending on the USE_MULTIPROCESSING flag, the manager either does
     the tasks himself or he grabs some workers and let them do it.
@@ -501,7 +501,7 @@ def update_temperature_dataset(summary):
     logger.debug("Updating temperature dataset for %s", summary)
     temperature = esd.get_temperature(summary)
-    error_values = [-999, -2 ** 15]
+    error_values = [-999, -(2**15)]
     temperature = [(x, y) for x, y in temperature if y not in error_values]
     if temperature != []:
         temperature = shrink_dataset(temperature, INTERVAL_TEMP)
@@ -546,8 +546,7 @@ def shrink(column, bin_idxs, n_bins):
     """
     with warnings.catch_warnings():
         # suppress "Mean of empty slice"
         warnings.simplefilter("ignore", category=RuntimeWarning)
-        data = np.nan_to_num([np.nanmean(column[bin_idxs[i]:bin_idxs[i + 1]])
-                              for i in range(n_bins)])
+        data = np.nan_to_num([np.nanmean(column[bin_idxs[i] : bin_idxs[i + 1]]) for i in range(n_bins)])
     return data.tolist()
@@ -559,7 +558,7 @@ def update_config(summary):
         logger.error('%s: Too many configs: %d. Skipping.', summary, num_config)
         return summary.num_config
 
-    for config in configs[summary.num_config:]:
+    for config in configs[summary.num_config :]:
         new_config = Configuration(summary=summary)
         for var in vars(new_config):
             if var in ['summary', 'id', 'summary_id'] or var[0] == '_':
@@ -637,8 +636,7 @@ def save_offsets(summary, offsets):
     """
     logger.debug("Saving detector timing offsets for %s", summary)
-    off = {f'offset_{i}': round_in_base(o, 0.25) if not np.isnan(o) else None
-           for i, o in enumerate(offsets, 1)}
+    off = {f'offset_{i}': round_in_base(o, 0.25) if not np.isnan(o) else None for i, o in enumerate(offsets, 1)}
     DetectorTimingOffset.objects.update_or_create(summary=summary, defaults=off)
     logger.debug("Saved succesfully")
diff --git a/publicdb/histograms/models.py b/publicdb/histograms/models.py
index 8d687b23..553ea1dc 100644
--- a/publicdb/histograms/models.py
+++ b/publicdb/histograms/models.py
@@ -43,30 +43,23 @@ class Meta:
 class SummaryQuerySet(models.QuerySet):
     def valid_date(self):
         """Filter by date to dates between start and today"""
-        return self.filter(
-            date__gte=FIRSTDATE,
-            date__lte=datetime.date.today())
+        return self.filter(date__gte=FIRSTDATE, date__lte=datetime.date.today())
 
     def with_data(self):
         """Filter with at least either events or weather data"""
-        return self.valid_date().filter(
-            models.Q(num_events__isnull=False)
-            | models.Q(num_weather__isnull=False))
+        return self.valid_date().filter(models.Q(num_events__isnull=False) | models.Q(num_weather__isnull=False))
 
     def with_events(self):
         """Filter with at least events"""
-        return self.valid_date().filter(
-            num_events__isnull=False)
+        return self.valid_date().filter(num_events__isnull=False)
 
     def with_config(self):
         """Filter with at least configurations"""
-        return self.valid_date().filter(
-            num_config__isnull=False)
+        return self.valid_date().filter(num_config__isnull=False)
 
     def with_events_in_last_hour(self):
         """Filter with events in last hour"""
-        return self.valid_date().filter(
-            events_in_last_hour=True)
+        return self.valid_date().filter(events_in_last_hour=True)
 
 
 class Summary(models.Model):
@@ -205,16 +198,19 @@ class Meta:
     def station(self):
         return self.summary.station.number
+
     station.admin_order_field = 'summary__station__number'
 
     def _master(self):
         return self.extract_hardware_serial(self.mas_version)
+
     _master.admin_order_field = 'mas_version'
     master = property(_master)
 
     def _slave(self):
         return self.extract_hardware_serial(self.slv_version)
+
     _slave.admin_order_field = 'slv_version'
     slave = property(_slave)
diff --git a/publicdb/inforecords/admin.py b/publicdb/inforecords/admin.py
index 1846d9fb..7af1c760 100644
--- a/publicdb/inforecords/admin.py
+++ b/publicdb/inforecords/admin.py
@@ -48,8 +48,7 @@ class CountryAdmin(admin.ModelAdmin):
 
 @admin.register(models.Station)
 class StationAdmin(admin.ModelAdmin):
-    list_display = ('number', 'name', 'cluster', 'contactinformation',
-                    'contact')
+    list_display = ('number', 'name', 'cluster', 'contactinformation', 'contact')
     search_fields = ('number', 'name', 'cluster__name')
     list_filter = ('cluster__country',)
     list_per_page = 200
@@ -57,7 +56,6 @@ class StationAdmin(admin.ModelAdmin):
 
 @admin.register(models.ContactInformation)
 class ContactInformationAdmin(admin.ModelAdmin):
-
     def owner_name(self, obj):
         return obj.contact_owner
 
@@ -73,8 +71,7 @@ def type(self, obj):
 
 @admin.register(models.Pc)
 class PcAdmin(admin.ModelAdmin):
-    list_display = ('station', 'name', 'is_active', 'is_test', 'ip', 'url',
-                    'keys')
+    list_display = ('station', 'name', 'is_active', 'is_test', 'ip', 'url', 'keys')
     list_filter = ('is_active', 'is_test')
     ordering = ('station',)
     list_per_page = 200
diff --git a/publicdb/inforecords/models.py b/publicdb/inforecords/models.py
index 7e588b54..60c87159 100644
--- a/publicdb/inforecords/models.py
+++ b/publicdb/inforecords/models.py
@@ -94,9 +94,7 @@ def email_work(self):
 
     @property
     def name(self):
-        return (' '.join((self.title, self.first_name, self.prefix_surname, self.surname))
-                .replace('  ', ' ')
-                .strip())
+        return ' '.join((self.title, self.first_name, self.prefix_surname, self.surname)).replace('  ', ' ').strip()
 
     class Meta:
         verbose_name = 'contact'
@@ -160,16 +158,20 @@ def clean(self):
         if self.number % 1000:
             raise ValidationError("Cluster number must be multiple of 1000")
         if not 0 <= (self.number - self.country.number) < 10000:
-            raise ValidationError("Cluster number must be in range of "
-                                  f"numbers for the country ({self.country.number}, {self.country.number + 10000}).")
+            raise ValidationError(
+                "Cluster number must be in range of "
+                f"numbers for the country ({self.country.number}, {self.country.number + 10000})."
+            )
         if self.parent is not None:
             if self.parent.parent is not None:
                 raise ValidationError("Subsubclusters are not allowed")
             if self.number % 100:
                 raise ValidationError("Subcluster number must be multiple of 100")
             if not 0 < (self.number - self.parent.number) < 1000:
-                raise ValidationError("Subcluster number must be in range of "
-                                      f"numbers for the cluster ({self.parent.number}, {self.parent.number + 1000}).")
+                raise ValidationError(
+                    "Subcluster number must be in range of "
+                    f"numbers for the cluster ({self.parent.number}, {self.parent.number + 1000})."
+                )
 
     def save(self, *args, **kwargs):
         super().save(*args, **kwargs)
@@ -226,7 +228,8 @@ def clean(self):
             if not 0 < (self.number - self.cluster.number) < 100:
                 raise ValidationError(
                     "Station number must be in range of numbers for the (sub)cluster "
-                    f"({self.cluster.number}, {self.cluster.number + 100}).")
+                    f"({self.cluster.number}, {self.cluster.number + 100})."
+ ) def save(self, *args, **kwargs): # Strip some problematic characters @@ -250,7 +253,7 @@ def number_of_detectors(self): today = datetime.datetime.utcnow() try: - config = (Configuration.objects.filter(summary__station=self, timestamp__lte=today).latest()) + config = Configuration.objects.filter(summary__station=self, timestamp__lte=today).latest() except Configuration.DoesNotExist: n_detectors = 4 else: @@ -278,10 +281,9 @@ def latest_location(self, date=None): summaries = Summary.objects.with_config().filter(station=self, date__lte=date).reverse() for summary in summaries: try: - config = (Configuration.objects - .filter(summary=summary) - .exclude(gps_latitude=0, gps_longitude=0) - .latest()) + config = ( + Configuration.objects.filter(summary=summary).exclude(gps_latitude=0, gps_longitude=0).latest() + ) except Configuration.DoesNotExist: pass else: @@ -289,9 +291,11 @@ def latest_location(self, date=None): except Summary.DoesNotExist: pass - return {'latitude': (round(config.gps_latitude, 7) if config.gps_latitude is not None else None), - 'longitude': (round(config.gps_longitude, 7) if config.gps_longitude is not None else None), - 'altitude': (round(config.gps_altitude, 2) if config.gps_altitude is not None else None)} + return { + 'latitude': (round(config.gps_latitude, 7) if config.gps_latitude is not None else None), + 'longitude': (round(config.gps_longitude, 7) if config.gps_longitude is not None else None), + 'altitude': (round(config.gps_altitude, 2) if config.gps_altitude is not None else None), + } class Meta: verbose_name = 'Station' @@ -326,6 +330,7 @@ def __str__(self): def keys(self): url = reverse('keys', kwargs={'host': self.name}) return mark_safe(f'Certificate {self.name}') + keys.short_description = 'Certificates' def url(self): @@ -333,6 +338,7 @@ def url(self): return '' else: return mark_safe(f's{self.station.number}.his') + url.short_description = 'VNC URL' class Meta: diff --git a/publicdb/inforecords/views.py b/publicdb/inforecords/views.py index 1ff7c07a..4912f79a 100644 --- a/publicdb/inforecords/views.py +++ b/publicdb/inforecords/views.py @@ -37,6 +37,9 @@ def create_datastore_config(request): if socket.gethostbyaddr(request.META["REMOTE_ADDR"])[0] != settings.DATASTORE_HOST: raise PermissionDenied - return render(request, 'inforecords/datastore.cfg', - {'stations': Station.objects.all().select_related('cluster__parent')}, - content_type='text/plain') + return render( + request, + 'inforecords/datastore.cfg', + {'stations': Station.objects.all().select_related('cluster__parent')}, + content_type='text/plain', + ) diff --git a/publicdb/maps/views.py b/publicdb/maps/views.py index b3b3683d..ec27bec3 100644 --- a/publicdb/maps/views.py +++ b/publicdb/maps/views.py @@ -16,9 +16,7 @@ def station_on_map(request, station_number): subclusters = get_subclusters() - return render(request, 'maps/map.html', - {'subclusters': subclusters, - 'center': center}) + return render(request, 'maps/map.html', {'subclusters': subclusters, 'center': center}) def stations_on_map(request, country=None, cluster=None, subcluster=None): @@ -43,9 +41,7 @@ def stations_on_map(request, country=None, cluster=None, subcluster=None): subclusters = get_subclusters() - return render(request, 'maps/map.html', - {'subclusters': subclusters, - 'focus': focus}) + return render(request, 'maps/map.html', {'subclusters': subclusters, 'focus': focus}) def get_subclusters(): @@ -59,13 +55,14 @@ def get_subclusters(): link = station in data_stations status = 
station_status.get_status(station.number) location = station.latest_location() - station_data = {'number': station.number, - 'name': station.name, - 'cluster': subcluster, - 'link': link, - 'status': status} + station_data = { + 'number': station.number, + 'name': station.name, + 'cluster': subcluster, + 'link': link, + 'status': status, + } station_data.update(location) stations.append(station_data) - subclusters.append({'name': subcluster.name, - 'stations': stations}) + subclusters.append({'name': subcluster.name, 'stations': stations}) return subclusters diff --git a/publicdb/raw_data/date_generator.py b/publicdb/raw_data/date_generator.py index 96db9248..b8c35282 100644 --- a/publicdb/raw_data/date_generator.py +++ b/publicdb/raw_data/date_generator.py @@ -38,8 +38,7 @@ def single_day_ranges(start, end): """ cur = start - next_day = (cur.replace(hour=0, minute=0, second=0, microsecond=0) - + datetime.timedelta(days=1)) + next_day = cur.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(days=1) while next_day < end: yield cur, next_day diff --git a/publicdb/raw_data/forms.py b/publicdb/raw_data/forms.py index 25057f2d..feb2cfa2 100644 --- a/publicdb/raw_data/forms.py +++ b/publicdb/raw_data/forms.py @@ -3,35 +3,38 @@ from ..inforecords.models import Cluster, Station -TYPES = [('events', 'Events'), - ('weather', 'Weather'), - ('lightning', 'Lightning'), - ('singles', 'Singles')] +TYPES = [('events', 'Events'), ('weather', 'Weather'), ('lightning', 'Lightning'), ('singles', 'Singles')] -LGT_TYPES = [('0', 'Single-point'), - ('1', 'Cloud-cloud'), - ('2', 'Cloud-cloud mid'), - ('3', 'Cloud-cloud end'), - ('4', 'Cloud-ground'), - ('5', 'Cloud-ground return')] +LGT_TYPES = [ + ('0', 'Single-point'), + ('1', 'Cloud-cloud'), + ('2', 'Cloud-cloud mid'), + ('3', 'Cloud-cloud end'), + ('4', 'Cloud-ground'), + ('5', 'Cloud-ground return'), +] -FILTER = [('network', 'Network'), - ('cluster', 'Cluster'), - ('stations', 'Stations')] +FILTER = [('network', 'Network'), ('cluster', 'Cluster'), ('stations', 'Stations')] class DataDownloadForm(forms.Form): data_type = forms.ChoiceField(choices=TYPES, widget=forms.RadioSelect()) station_events = forms.ModelChoiceField( Station.objects.filter(summaries__num_events__isnull=False).distinct(), - empty_label='---------', required=False) + empty_label='---------', + required=False, + ) station_weather = forms.ModelChoiceField( Station.objects.filter(summaries__num_weather__isnull=False).distinct(), - empty_label='---------', required=False) + empty_label='---------', + required=False, + ) lightning_type = forms.ChoiceField(choices=LGT_TYPES, initial=4, required=False) station_singles = forms.ModelChoiceField( Station.objects.filter(summaries__num_singles__isnull=False).distinct(), - empty_label='---------', required=False) + empty_label='---------', + required=False, + ) start = forms.DateTimeField(help_text="e.g. '2013-5-17', or '2013-5-17 12:45'") end = forms.DateTimeField(help_text="e.g. '2013-5-18', or '2013-5-18 9:05'") download = forms.BooleanField(initial=True, required=False) @@ -57,9 +60,7 @@ def clean(self): class CoincidenceDownloadForm(forms.Form): filter_by = forms.ChoiceField(choices=FILTER, widget=forms.RadioSelect()) - cluster = forms.ModelChoiceField(Cluster.objects.filter(parent=None), - empty_label='---------', - required=False) + cluster = forms.ModelChoiceField(Cluster.objects.filter(parent=None), empty_label='---------', required=False) stations = forms.CharField(help_text="e.g. 
'103, 104, 105'", required=False) start = forms.DateTimeField(help_text="e.g. '2014-4-5', or '2014-4-18 12:45'") end = forms.DateTimeField(help_text="e.g. '2014-4-29', or '2014-04-30 9:05'") diff --git a/publicdb/raw_data/knmi_lightning.py b/publicdb/raw_data/knmi_lightning.py index 4da13954..0a0e9b36 100644 --- a/publicdb/raw_data/knmi_lightning.py +++ b/publicdb/raw_data/knmi_lightning.py @@ -35,11 +35,13 @@ def discharges(datafile, start, end, type=4): for idx in cg_idx[0]: ts, ns = get_gps_timestamp(reference_date, discharge_table.time_offset[idx]) if start <= ts < end: - yield {'timestamp': ts, - 'nanoseconds': ns, - 'latitude': discharge_table.latitude[idx], - 'longitude': discharge_table.longitude[idx], - 'current': discharge_table.current[idx]} + yield { + 'timestamp': ts, + 'nanoseconds': ns, + 'latitude': discharge_table.latitude[idx], + 'longitude': discharge_table.longitude[idx], + 'current': discharge_table.current[idx], + } def data_path(date): diff --git a/publicdb/raw_data/urls.py b/publicdb/raw_data/urls.py index 5c02718e..34f5115c 100644 --- a/publicdb/raw_data/urls.py +++ b/publicdb/raw_data/urls.py @@ -9,11 +9,20 @@ path('download/', views.download_form, name="download_form"), path('download////', views.download_form, name="download_form"), path('download/coincidences/', views.coincidences_download_form, name="coincidences_download_form"), - path('download/coincidences///', views.coincidences_download_form, name="coincidences_download_form"), + path( + 'download/coincidences///', + views.coincidences_download_form, + name="coincidences_download_form", + ), path('rpc', views.call_xmlrpc, name="rpc"), path('/events/', views.download_data, {'data_type': 'events'}, name="events"), path('/weather/', views.download_data, {'data_type': 'weather'}, name="weather"), path('/singles/', views.download_data, {'data_type': 'singles'}, name="singles"), - path('knmi/lightning//', views.download_data, {'data_type': 'lightning'}, name="lightning"), + path( + 'knmi/lightning//', + views.download_data, + {'data_type': 'lightning'}, + name="lightning", + ), path('network/coincidences/', views.download_coincidences, name="coincidences"), ] diff --git a/publicdb/raw_data/views.py b/publicdb/raw_data/views.py index d7cb5ba5..5b5e46b9 100644 --- a/publicdb/raw_data/views.py +++ b/publicdb/raw_data/views.py @@ -63,8 +63,7 @@ def call_xmlrpc(request): template = loader.get_template('raw_data/xmlrpc.html') methods = [] for method in dispatcher.system_listMethods(): - methods.append({'name': method, - 'help': dispatcher.system_methodHelp(method)}) + methods.append({'name': method, 'help': dispatcher.system_methodHelp(method)}) context = {'methods': methods} response.write(template.render(context)) return response @@ -76,6 +75,7 @@ def xmlrpc(uri): def register_xmlrpc(fn): dispatcher.register_function(fn, uri) return fn + return register_xmlrpc @@ -164,20 +164,23 @@ def download_form(request, station_number=None, start=None, end=None): url = reverse('data:lightning', kwargs={'lightning_type': lightning_type}) else: station = form.cleaned_data['station'] - url = reverse(f'data:{data_type}', - kwargs={'station_number': station.number}) + url = reverse(f'data:{data_type}', kwargs={'station_number': station.number}) return HttpResponseRedirect(f'{url}?{query_string}') else: if station_number: station = get_object_or_404(Station, number=station_number) else: station = None - form = DataDownloadForm(initial={'station_events': station, - 'station_weather': station, - 'station_singles': station, - 'start': 
start, - 'end': end, - 'data_type': 'events'}) + form = DataDownloadForm( + initial={ + 'station_events': station, + 'station_weather': station, + 'station_singles': station, + 'start': start, + 'end': end, + 'data_type': 'events', + } + ) return render(request, 'raw_data/data_download.html', {'form': form}) @@ -210,8 +213,7 @@ def download_data(request, data_type='events', station_number=None, lightning_ty else: end = start + datetime.timedelta(days=1) except ValueError: - msg = ("Incorrect optional parameters (start [datetime], " "end [datetime])") + msg = "Incorrect optional parameters (start [datetime], end [datetime])" return HttpResponseBadRequest(msg, content_type=MIME_PLAIN) download = request.GET.get('download', False) @@ -232,7 +234,7 @@ filename = f'singles-s{station_number}-{timerange_string}.tsv' elif data_type == 'lightning': if lightning_type not in list(range(6)): - msg = ("Incorrect lightning type, should be a value between 0-5") + msg = "Incorrect lightning type, should be a value between 0-5" return HttpResponseBadRequest(msg, content_type=MIME_PLAIN) tsv_output = generate_lightning_as_tsv(lightning_type, start, end) filename = f'lightning-knmi-{timerange_string}.tsv' @@ -264,30 +266,33 @@ def generate_events_as_tsv(station, start, end): if not len(events): continue dt = events['timestamp'].astype('datetime64[s]') - data = column_stack([ - dt.astype('datetime64[D]'), - [value.time() for value in dt.tolist()], - events['timestamp'], - events['nanoseconds'], - events['pulseheights'][:, 0], - events['pulseheights'][:, 1], - events['pulseheights'][:, 2], - events['pulseheights'][:, 3], - events['integrals'][:, 0], - events['integrals'][:, 1], - events['integrals'][:, 2], - events['integrals'][:, 3], - clean_float_array(events['n1']), - clean_float_array(events['n2']), - clean_float_array(events['n3']), - clean_float_array(events['n4']), - clean_float_array(events['t1']), - clean_float_array(events['t2']), - clean_float_array(events['t3']), - clean_float_array(events['t4']), - clean_float_array(events['t_trigger']), - clean_angle_array(reconstructions['zenith']), - clean_angle_array(reconstructions['azimuth'])]) + data = column_stack( + [ + dt.astype('datetime64[D]'), + [value.time() for value in dt.tolist()], + events['timestamp'], + events['nanoseconds'], + events['pulseheights'][:, 0], + events['pulseheights'][:, 1], + events['pulseheights'][:, 2], + events['pulseheights'][:, 3], + events['integrals'][:, 0], + events['integrals'][:, 1], + events['integrals'][:, 2], + events['integrals'][:, 3], + clean_float_array(events['n1']), + clean_float_array(events['n2']), + clean_float_array(events['n3']), + clean_float_array(events['n4']), + clean_float_array(events['t1']), + clean_float_array(events['t2']), + clean_float_array(events['t3']), + clean_float_array(events['t4']), + clean_float_array(events['t_trigger']), + clean_angle_array(reconstructions['zenith']), + clean_angle_array(reconstructions['azimuth']), + ] + ) block_buffer = StringIO() writer = csv.writer(block_buffer, delimiter='\t', lineterminator='\n') writer.writerows(data) @@ -353,24 +358,27 @@ def generate_weather_as_tsv(station, start, end): weather_events = get_weather_from_esd_in_range(station, start, end) for events in weather_events: dt = events['timestamp'].astype('datetime64[s]') - data = column_stack([ - dt.astype('datetime64[D]'), - [value.time() for value in dt.tolist()], - events['timestamp'], - 
clean_float_array(events['temp_inside']), - clean_float_array(events['temp_outside']), - events['humidity_inside'], - events['humidity_outside'], - clean_float_array(events['barometer']), - events['wind_dir'], - events['wind_speed'], - events['solar_rad'], - events['uv'], - clean_float_array(events['evapotranspiration']), - clean_float_array(events['rain_rate']), - events['heat_index'], - clean_float_array(events['dew_point']), - clean_float_array(events['wind_chill'])]) + data = column_stack( + [ + dt.astype('datetime64[D]'), + [value.time() for value in dt.tolist()], + events['timestamp'], + clean_float_array(events['temp_inside']), + clean_float_array(events['temp_outside']), + events['humidity_inside'], + events['humidity_outside'], + clean_float_array(events['barometer']), + events['wind_dir'], + events['wind_speed'], + events['solar_rad'], + events['uv'], + clean_float_array(events['evapotranspiration']), + clean_float_array(events['rain_rate']), + events['heat_index'], + clean_float_array(events['dew_point']), + clean_float_array(events['wind_chill']), + ] + ) block_buffer = StringIO() writer = csv.writer(block_buffer, delimiter='\t', lineterminator='\n') writer.writerows(data) @@ -425,18 +433,21 @@ def generate_singles_as_tsv(station, start, end): singles_events = get_singles_from_esd_in_range(station, start, end) for events in singles_events: dt = events['timestamp'].astype('datetime64[s]') - data = column_stack([ - dt.astype('datetime64[D]'), - [value.time() for value in dt.tolist()], - events['timestamp'], - events['mas_ch1_low'], - events['mas_ch1_high'], - events['mas_ch2_low'], - events['mas_ch2_high'], - events['slv_ch1_low'], - events['slv_ch1_high'], - events['slv_ch2_low'], - events['slv_ch2_high']]) + data = column_stack( + [ + dt.astype('datetime64[D]'), + [value.time() for value in dt.tolist()], + events['timestamp'], + events['mas_ch1_low'], + events['mas_ch1_high'], + events['mas_ch2_low'], + events['mas_ch2_high'], + events['slv_ch1_low'], + events['slv_ch1_high'], + events['slv_ch2_low'], + events['slv_ch2_high'], + ] + ) block_buffer = StringIO() writer = csv.writer(block_buffer, delimiter='\t', lineterminator='\n') writer.writerows(data) @@ -481,8 +492,14 @@ def get_singles_from_esd_in_range(station, start, end): def generate_lightning_as_tsv(lightning_type, start, end): """Render TSV output as an iterator.""" - types = ('Single-point', 'Cloud-cloud', 'Cloud-cloud mid', - 'Cloud-cloud end', 'Cloud-ground', 'Cloud-ground return') + types = ( + 'Single-point', + 'Cloud-cloud', + 'Cloud-cloud mid', + 'Cloud-cloud end', + 'Cloud-ground', + 'Cloud-ground return', + ) type_str = f'{lightning_type}: {types[lightning_type]}' template = loader.get_template('raw_data/lightning_data.tsv') @@ -497,12 +514,15 @@ def generate_lightning_as_tsv(lightning_type, start, end): events = get_lightning_in_range(lightning_type, start, end) for event in events: dt = datetime.datetime.utcfromtimestamp(event['timestamp']) - row = [dt.date(), dt.time(), - event['timestamp'], - event['nanoseconds'], - clean_floats(event['latitude'], precision=6), - clean_floats(event['longitude'], precision=6), - int(event['current'])] + row = [ + dt.date(), + dt.time(), + event['timestamp'], + event['nanoseconds'], + clean_floats(event['latitude'], precision=6), + clean_floats(event['longitude'], precision=6), + int(event['current']), + ] writer.writerow(row) yield line_buffer.line lightning_returned = True @@ -542,11 +562,16 @@ def coincidences_download_form(request, start=None, end=None): end = 
form.cleaned_data['end'] n = form.cleaned_data['n'] download = form.cleaned_data['download'] - query_string = urllib.parse.urlencode({ - 'cluster': cluster, 'stations': stations, - 'start': start, 'end': end, - 'n': n, 'download': download - }) + query_string = urllib.parse.urlencode( + { + 'cluster': cluster, + 'stations': stations, + 'start': start, + 'end': end, + 'n': n, + 'download': download, + } + ) url = reverse('data:coincidences') return HttpResponseRedirect(f'{url}?{query_string}') else: @@ -582,7 +607,7 @@ def download_coincidences(request): else: end = start + datetime.timedelta(days=1) except ValueError: - error_msg = ("Incorrect optional parameters (start [datetime], end [datetime])") + error_msg = "Incorrect optional parameters (start [datetime], end [datetime])" return HttpResponseBadRequest(error_msg, content_type=MIME_PLAIN) try: @@ -603,8 +628,7 @@ error_msg = "Both stations and cluster are defined." elif stations: try: - stations = [int(number.strip('"\' ')) - for number in stations.strip('[](), ').split(',')] + stations = [int(number.strip('"\' ')) for number in stations.strip('[](), ').split(',')] except ValueError: error_msg = "Unable to parse station numbers." else: @@ -616,8 +640,9 @@ error_msg = "Not all station numbers are valid." elif cluster: cluster = get_object_or_404(Cluster, name=cluster) - stations = (Station.objects.filter(Q(cluster__parent=cluster) | Q(cluster=cluster)) - .values_list('number', flat=True)) + stations = Station.objects.filter(Q(cluster__parent=cluster) | Q(cluster=cluster)).values_list( + 'number', flat=True + ) if len(stations) >= 30: error_msg = "Too many stations in this cluster, manually select a subset of stations." @@ -660,29 +685,33 @@ def generate_coincidences_as_tsv(start, end, cluster, stations, n): for id, number, event in get_coincidences_from_esd_in_range(start, end, stations, n): dt = datetime.datetime.utcfromtimestamp(event['timestamp']) - row = [id, - number, - dt.date(), dt.time(), - event['timestamp'], - event['nanoseconds'], - event['pulseheights'][0], - event['pulseheights'][1], - event['pulseheights'][2], - event['pulseheights'][3], - event['integrals'][0], - event['integrals'][1], - event['integrals'][2], - event['integrals'][3], - clean_floats(event['n1']), - clean_floats(event['n2']), - clean_floats(event['n3']), - clean_floats(event['n4']), - clean_floats(event['t1']), - clean_floats(event['t2']), - clean_floats(event['t3']), - clean_floats(event['t4']), - clean_floats(event['t_trigger']), - -999, -999] + row = [ + id, + number, + dt.date(), + dt.time(), + event['timestamp'], + event['nanoseconds'], + event['pulseheights'][0], + event['pulseheights'][1], + event['pulseheights'][2], + event['pulseheights'][3], + event['integrals'][0], + event['integrals'][1], + event['integrals'][2], + event['integrals'][3], + clean_floats(event['n1']), + clean_floats(event['n2']), + clean_floats(event['n3']), + clean_floats(event['n4']), + clean_floats(event['t1']), + clean_floats(event['t2']), + clean_floats(event['t3']), + clean_floats(event['t4']), + clean_floats(event['t_trigger']), + -999, + -999, + ] writer.writerow(row) yield line_buffer.line coincidences_returned = True @@ -794,7 +823,7 @@ def prettyprint_timerange(t0, t1): """Pretty print a time range.""" duration = t1 - t0 - if (duration.seconds > 0 or t0.second > 0 or t0.minute > 0 or t0.hour > 0): + if duration.seconds > 0 or t0.second > 0 or t0.minute > 0 or t0.hour > 0: timerange = f'{t0} {t1}' 
elif duration.days == 1: timerange = str(t0.date()) diff --git a/publicdb/settings_develop.py b/publicdb/settings_develop.py index accdd533..7d54acb3 100644 --- a/publicdb/settings_develop.py +++ b/publicdb/settings_develop.py @@ -119,7 +119,6 @@ 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', - 'publicdb.inforecords', 'publicdb.histograms', 'publicdb.coincidences', @@ -136,16 +135,12 @@ LOGGING = { 'version': 1, 'disable_existing_loggers': False, - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse' - } - }, + 'filters': {'require_debug_false': {'()': 'django.utils.log.RequireDebugFalse'}}, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], - 'class': 'django.utils.log.AdminEmailHandler' + 'class': 'django.utils.log.AdminEmailHandler', }, 'null_handler': { 'class': 'logging.NullHandler', @@ -168,6 +163,6 @@ 'publicdb': { 'handlers': ['null_handler'], 'propagate': False, - } + }, }, } diff --git a/publicdb/station_layout/admin.py b/publicdb/station_layout/admin.py index 5f6f94e5..a257fd24 100644 --- a/publicdb/station_layout/admin.py +++ b/publicdb/station_layout/admin.py @@ -11,6 +11,5 @@ class StationLayoutAdmin(admin.ModelAdmin): @admin.register(models.StationLayoutQuarantine) class StationLayoutQuarantineAdmin(admin.ModelAdmin): - list_display = ('station', 'active_date', 'email_verified', 'approved', - 'reviewed') + list_display = ('station', 'active_date', 'email_verified', 'approved', 'reviewed') list_filter = ('station', 'email_verified', 'approved', 'reviewed') diff --git a/publicdb/station_layout/forms.py b/publicdb/station_layout/forms.py index c26b0b4c..921d0215 100644 --- a/publicdb/station_layout/forms.py +++ b/publicdb/station_layout/forms.py @@ -10,7 +10,7 @@ radius_field = partial(forms.FloatField, **DISTANCE_LIMITS) alpha_field = partial(forms.FloatField, **ANGLE_LIMITS) -height_field = partial(forms.FloatField, initial=0., **DISTANCE_LIMITS) +height_field = partial(forms.FloatField, initial=0.0, **DISTANCE_LIMITS) beta_field = partial(forms.FloatField, **ANGLE_LIMITS) @@ -18,10 +18,10 @@ class StationLayoutQuarantineForm(forms.Form): name = forms.CharField(max_length=255) email = forms.EmailField() - station = forms.ModelChoiceField( - queryset=Station.objects.filter(pcs__is_test=False).distinct()) + station = forms.ModelChoiceField(queryset=Station.objects.filter(pcs__is_test=False).distinct()) active_date = forms.DateTimeField( - help_text="Date the detectors were placed in this configuration, e.g. '2010-5-17 12:45'.") + help_text="Date the detectors were placed in this configuration, e.g. '2010-5-17 12:45'." 
+ ) # Master detectors detector_1_radius = radius_field() diff --git a/publicdb/station_layout/models.py b/publicdb/station_layout/models.py index 86fa1ef4..412ce4f3 100644 --- a/publicdb/station_layout/models.py +++ b/publicdb/station_layout/models.py @@ -32,8 +32,7 @@ class StationLayout(models.Model): @property def has_four_detectors(self): - return (self.detector_3_radius is not None - and self.detector_4_radius is not None) + return self.detector_3_radius is not None and self.detector_4_radius is not None class Meta: verbose_name = 'Station layout' @@ -46,16 +45,14 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) try: next_layout = StationLayout.objects.filter( - station=self.station, - active_date__gt=self.active_date).earliest() + station=self.station, active_date__gt=self.active_date + ).earliest() next_date = next_layout.active_date except StationLayout.DoesNotExist: next_date = date.today() if self.has_four_detectors: # Only for 4 detector stations - summaries = Summary.objects.filter(station=self.station, - date__gte=self.active_date, - date__lt=next_date) + summaries = Summary.objects.filter(station=self.station, date__gte=self.active_date, date__lt=next_date) for summary in summaries: if summary.num_events: summary.needs_update_events = True @@ -99,9 +96,8 @@ class Meta: def generate_hashes(self): hash_submit = os.urandom(16).encode('hex') hash_review = os.urandom(16).encode('hex') - if ( - StationLayoutQuarantine.objects.filter(hash_submit=hash_submit) - or StationLayoutQuarantine.objects.filter(hash_review=hash_review) + if StationLayoutQuarantine.objects.filter(hash_submit=hash_submit) or StationLayoutQuarantine.objects.filter( + hash_review=hash_review ): self.generate_hashes() else: @@ -141,8 +137,7 @@ def sendmail_review(self): The HiSPARC Team''' ) sender = 'Beheer HiSPARC <beheer@hisparc.nl>' - send_mail(subject, message, sender, ['beheer@hisparc.nl'], - fail_silently=False) + send_mail(subject, message, sender, ['beheer@hisparc.nl'], fail_silently=False) def sendmail_accepted(self): subject = 'HiSPARC station layout accepted' diff --git a/publicdb/station_layout/views.py b/publicdb/station_layout/views.py index 41e0d619..235bb7ba 100644 --- a/publicdb/station_layout/views.py +++ b/publicdb/station_layout/views.py @@ -48,22 +48,26 @@ def validate_layout_submit(request): detector_4_alpha=form.cleaned_data['detector_4_alpha'], detector_4_beta=form.cleaned_data['detector_4_beta'], detector_4_radius=form.cleaned_data['detector_4_radius'], - detector_4_height=form.cleaned_data['detector_4_height']) + detector_4_height=form.cleaned_data['detector_4_height'], + ) new_layout.generate_hashes() new_layout.save() new_layout.sendmail_submit() - return render(request, 'station_layout/submitted.html', - {'name': form.cleaned_data['name'], - 'email': form.cleaned_data['email'], - 'station': form.cleaned_data['station']}) + return render( + request, + 'station_layout/submitted.html', + { + 'name': form.cleaned_data['name'], + 'email': form.cleaned_data['email'], + 'station': form.cleaned_data['station'], + }, + ) def confirmed_layout(request, hash): - submitted_layout = get_object_or_404(StationLayoutQuarantine, - hash_submit=hash, - email_verified=False) + submitted_layout = get_object_or_404(StationLayoutQuarantine, hash_submit=hash, email_verified=False) submitted_layout.email_verified = True submitted_layout.save() submitted_layout.sendmail_review() @@ -71,10 +75,7 @@ def review_layout(request, hash): - submitted_layout = 
get_object_or_404(StationLayoutQuarantine, - hash_review=hash, - email_verified=True, - reviewed=False) + submitted_layout = get_object_or_404(StationLayoutQuarantine, hash_review=hash, email_verified=True, reviewed=False) if request.method == 'POST': form = ReviewStationLayoutForm(request.POST) else: @@ -83,20 +84,25 @@ def review_layout(request, hash): try: station = submitted_layout.station active_date = submitted_layout.active_date.replace(hour=23, minute=59, second=59) - config = (Configuration.objects.filter(summary__station=station, - timestamp__gte=FIRSTDATE, - timestamp__lte=active_date) - .exclude(gps_latitude=0.)).latest() + config = ( + Configuration.objects.filter( + summary__station=station, timestamp__gte=FIRSTDATE, timestamp__lte=active_date + ).exclude(gps_latitude=0.0) + ).latest() except Configuration.DoesNotExist: try: - configs = (Configuration.objects.filter(summary__station=station, timestamp__gte=active_date) - .exclude(gps_latitude=0.)) + configs = Configuration.objects.filter(summary__station=station, timestamp__gte=active_date).exclude( + gps_latitude=0.0 + ) config = configs.earliest() except Configuration.DoesNotExist: config = None - return render(request, 'station_layout/review.html', - {'layout': submitted_layout, 'form': form, 'hash': hash, 'config': config}) + return render( + request, + 'station_layout/review.html', + {'layout': submitted_layout, 'form': form, 'hash': hash, 'config': config}, + ) def validate_review_layout(request, hash): @@ -108,10 +114,7 @@ def validate_review_layout(request, hash): if not form.is_valid(): return review_layout(request) - submitted_layout = get_object_or_404(StationLayoutQuarantine, - hash_review=hash, - email_verified=True, - reviewed=False) + submitted_layout = get_object_or_404(StationLayoutQuarantine, hash_review=hash, email_verified=True, reviewed=False) submitted_layout.reviewed = True submitted_layout.approved = form.cleaned_data['approved'] submitted_layout.save() @@ -135,7 +138,8 @@ def validate_review_layout(request, hash): detector_4_alpha=submitted_layout.detector_4_alpha, detector_4_beta=submitted_layout.detector_4_beta, detector_4_radius=submitted_layout.detector_4_radius, - detector_4_height=submitted_layout.detector_4_height) + detector_4_height=submitted_layout.detector_4_height, + ) accepted_layout.save() submitted_layout.sendmail_accepted() else: diff --git a/publicdb/status_display/status.py b/publicdb/status_display/status.py index a1908ad7..f73d6763 100644 --- a/publicdb/status_display/status.py +++ b/publicdb/status_display/status.py @@ -24,9 +24,19 @@ def __init__(self): self.status_available = update_state.update_has_finished(yesterday) self.stations = Station.objects.values_list('number', flat=True) - self.stations_with_current_data = Summary.objects.with_events_in_last_hour().filter(date__exact=yesterday).values_list('station__number', flat=True) - self.stations_with_recent_data = Summary.objects.with_events_in_last_hour().filter(date__gte=recent_day).values_list('station__number', flat=True) - self.stations_with_pc = Pc.objects.exclude(type__slug='admin').filter(is_active=True).values_list('station__number', flat=True) + self.stations_with_current_data = ( + Summary.objects.with_events_in_last_hour() + .filter(date__exact=yesterday) + .values_list('station__number', flat=True) + ) + self.stations_with_recent_data = ( + Summary.objects.with_events_in_last_hour() + .filter(date__gte=recent_day) + .values_list('station__number', flat=True) + ) + self.stations_with_pc = ( + 
Pc.objects.exclude(type__slug='admin').filter(is_active=True).values_list('station__number', flat=True) + ) def _get_datetime_yesterday(self): """Determine the datetime of `yesterday` diff --git a/publicdb/status_display/templatetags/fix_data.py b/publicdb/status_display/templatetags/fix_data.py index 065ca9cc..d784c18d 100644 --- a/publicdb/status_display/templatetags/fix_data.py +++ b/publicdb/status_display/templatetags/fix_data.py @@ -11,8 +11,7 @@ def fix_histogram_data(value): """Append one value to end of data, to fix step histogram""" if len(value) > 1: - return value + [[value[-1][0] + (value[-1][0] - value[-2][0]), - value[-1][1]]] + return value + [[value[-1][0] + (value[-1][0] - value[-2][0]), value[-1][1]]] else: return value @@ -43,7 +42,7 @@ def fix_timestamps_in_data(values): x, y = list(zip(*values)) seconds_in_day = [timestamp % 86400 for timestamp in x] - hours_in_day = [seconds / 3600. for seconds in seconds_in_day] + hours_in_day = [seconds / 3600.0 for seconds in seconds_in_day] values = [list(u) for u in zip(hours_in_day, y)] return values diff --git a/publicdb/status_display/urls.py b/publicdb/status_display/urls.py index aac810f6..b5e45be8 100644 --- a/publicdb/status_display/urls.py +++ b/publicdb/status_display/urls.py @@ -26,31 +26,85 @@ ] source_patterns = [ - # Network histograms - path(f'{type}/<date:date>/', views.get_specific_network_histogram_source, {'type': type}, name=type) - for type in ['coincidencetime', 'coincidencenumber'] -] + [ - # Histograms - path(f'{type}/<int:station_number>/<date:date>/', views.get_specific_histogram_source, {'type': type}, name=type) - for type in ['eventtime', 'pulseheight', 'pulseintegral', 'singleslow', 'singleshigh', 'zenith', 'azimuth'] -] + [ - # Datasets - path(f'{type}/<int:station_number>/<date:date>/', views.get_specific_dataset_source, {'type': type}, name=type) - for type in ['barometer', 'temperature', 'singlesratelow', 'singlesratehigh'] -] + [ - # Configurations - path(f'{type}/<int:station_number>/', views.get_specific_config_source, {'type': type}, name=type) - for type in ['electronics', 'voltage', 'current', 'gps', 'trigger'] -] + [ - # Histograms - path('eventtime/<int:station_number>/', views.get_eventtime_source, name='eventtime'), - - # Configurations - path('layout/<int:station_number>/', views.get_station_layout_source, name='layout'), - - # Calibrations - path('detector_timing_offsets/<int:station_number>/', views.get_detector_timing_offsets_source, name="detector_offsets"), - path('station_timing_offsets/<int:ref_station_number>/<int:station_number>/', views.get_station_timing_offsets_source, name="station_offsets"), + *[ + # Network histograms + path( + f'{type}/<date:date>/', + views.get_specific_network_histogram_source, + {'type': type}, + name=type, + ) + for type in [ + 'coincidencetime', + 'coincidencenumber', + ] + ], + *[ + # Histograms + path( + f'{type}/<int:station_number>/<date:date>/', + views.get_specific_histogram_source, + {'type': type}, + name=type, + ) + for type in [ + 'eventtime', + 'pulseheight', + 'pulseintegral', + 'singleslow', + 'singleshigh', + 'zenith', + 'azimuth', + ] + ], + *[ + # Datasets + path( + f'{type}/<int:station_number>/<date:date>/', + views.get_specific_dataset_source, + {'type': type}, + name=type, + ) + for type in [ + 'barometer', + 'temperature', + 'singlesratelow', + 'singlesratehigh', + ] + ], + *[ + # Configurations + path( + f'{type}/<int:station_number>/', + views.get_specific_config_source, + {'type': type}, + name=type, + ) + for type in [ + 'electronics', + 'voltage', + 'current', + 'gps', + 'trigger', + ] + ], + *[ + # Histograms + path('eventtime/<int:station_number>/', views.get_eventtime_source, name='eventtime'), + # Configurations + path('layout/<int:station_number>/', views.get_station_layout_source, name='layout'), + # Calibrations + path( + 
'detector_timing_offsets/<int:station_number>/', + views.get_detector_timing_offsets_source, + name="detector_offsets", + ), + path( + 'station_timing_offsets/<int:ref_station_number>/<int:station_number>/', + views.get_station_timing_offsets_source, + name="station_offsets", + ), + ], ] app_name = 'status' @@ -60,11 +114,9 @@ path('stations_by_name/', views.stations_by_name, name="stations_by_name"), path('stations_by_number/', views.stations_by_number, name="stations_by_number"), path('stations_by_status/', views.stations_by_status, name="stations_by_status"), - path('stations_on_map/', include((maps_patterns, 'map'))), path('network/', include((network_patterns, 'network'))), path('stations/', include((station_patterns, 'station'))), path('source/', include((source_patterns, 'source'))), - path('help/', views.help, name="help"), ] diff --git a/publicdb/status_display/views.py b/publicdb/status_display/views.py index 5fefd6ce..ec751043 100644 --- a/publicdb/status_display/views.py +++ b/publicdb/status_display/views.py @@ -54,16 +54,13 @@ def stations_by_country(request): countries = OrderedDict() test_stations = [] - for station in (Station.objects - .exclude(pcs__type__slug='admin') - .select_related('cluster__country', 'cluster__parent')): + for station in Station.objects.exclude(pcs__type__slug='admin').select_related( + 'cluster__country', 'cluster__parent' + ): link = station in data_stations status = station_status.get_status(station.number) - station_info = {'number': station.number, - 'name': station.name, - 'link': link, - 'status': status} + station_info = {'number': station.number, 'name': station.name, 'link': link, 'status': status} country = station.cluster.country.name if station.cluster.parent: @@ -83,10 +80,11 @@ countries[country][cluster][subcluster] = [] countries[country][cluster][subcluster].append(station_info) - return render(request, 'status_display/stations_by_country.html', - {'countries': countries, - 'test_stations': test_stations, - 'statuscount': statuscount}) + return render( + request, + 'status_display/stations_by_country.html', + {'countries': countries, 'test_stations': test_stations, 'statuscount': statuscount}, + ) def stations_by_number(request): @@ -101,13 +99,9 @@ link = station in data_stations status = station_status.get_status(station.number) - stations.append({'number': station.number, - 'name': station.name, - 'link': link, - 'status': status}) + stations.append({'number': station.number, 'name': station.name, 'link': link, 'status': status}) - return render(request, 'status_display/stations_by_number.html', - {'stations': stations, 'statuscount': statuscount}) + return render(request, 'status_display/stations_by_number.html', {'stations': stations, 'statuscount': statuscount}) def stations_by_status(request): @@ -125,14 +119,13 @@ # use setdefault() to automatically include unforeseen status labels without crashing group = station_groups.setdefault(status, []) - group.append({'number': station.number, - 'name': station.name, - 'link': link, - 'status': status}) + group.append({'number': station.number, 'name': station.name, 'link': link, 'status': status}) - return render(request, 'status_display/stations_by_status.html', - {'station_groups': station_groups, - 'statuscount': statuscount}) + return render( + request, + 'status_display/stations_by_status.html', + {'station_groups': station_groups, 'statuscount': statuscount}, + ) def stations_by_name(request): @@ -147,15 +140,11 @@ def 
stations_by_name(request): link = station in data_stations status = station_status.get_status(station.number) - stations.append({'number': station.number, - 'name': station.name, - 'link': link, - 'status': status}) + stations.append({'number': station.number, 'name': station.name, 'link': link, 'status': status}) stations = sorted(stations, key=itemgetter('name')) - return render(request, 'status_display/stations_by_name.html', - {'stations': stations, 'statuscount': statuscount}) + return render(request, 'status_display/stations_by_name.html', {'stations': stations, 'statuscount': statuscount}) def stations_on_map(request, country=None, cluster=None, subcluster=None): @@ -190,20 +179,22 @@ def stations_on_map(request, country=None, cluster=None, subcluster=None): status = station_status.get_status(station.number) location = station.latest_location() - station_data = {'number': station.number, - 'name': station.name, - 'cluster': subcluster, - 'link': link, - 'status': status} + station_data = { + 'number': station.number, + 'name': station.name, + 'cluster': subcluster, + 'link': link, + 'status': status, + } station_data.update(location) stations.append(station_data) - subclusters.append({'name': subcluster.name, - 'stations': stations}) + subclusters.append({'name': subcluster.name, 'stations': stations}) - return render(request, 'status_display/stations_on_map.html', - {'subclusters': subclusters, - 'focus': focus, - 'statuscount': statuscount}) + return render( + request, + 'status_display/stations_on_map.html', + {'subclusters': subclusters, 'focus': focus, 'statuscount': statuscount}, + ) class NetworkSummaryDetailView(DateDetailView): @@ -224,9 +215,8 @@ def get_object(self, queryset=None): return obj def get_queryset(self): - return ( - NetworkSummary.objects.with_coincidences() - .prefetch_related('network_histograms', 'network_histograms__type') + return NetworkSummary.objects.with_coincidences().prefetch_related( + 'network_histograms', 'network_histograms__type' ) def get_context_data(self, **kwargs): @@ -235,33 +225,24 @@ def get_context_data(self, **kwargs): # Find previous/next dates with data try: - prev = (NetworkSummary.objects - .with_coincidences() - .filter(date__lt=date) - .latest() - .date) + prev = NetworkSummary.objects.with_coincidences().filter(date__lt=date).latest().date except NetworkSummary.DoesNotExist: prev = None try: - next = (NetworkSummary.objects - .with_coincidences() - .filter(date__gt=date) - .earliest() - .date) + next = NetworkSummary.objects.with_coincidences().filter(date__gt=date).earliest().date except NetworkSummary.DoesNotExist: next = None # Number of non-test stations with data on this date - n_stations = (Station.objects - .filter(summaries__date=date, summaries__num_events__isnull=False, pcs__is_test=False) - .distinct() - .count()) - histograms = (DailyHistogram.objects - .filter(summary__date=date, - summary__station__pcs__is_test=False, - type__slug='eventtime') - .distinct()) + n_stations = ( + Station.objects.filter(summaries__date=date, summaries__num_events__isnull=False, pcs__is_test=False) + .distinct() + .count() + ) + histograms = DailyHistogram.objects.filter( + summary__date=date, summary__station__pcs__is_test=False, type__slug='eventtime' + ).distinct() number_of_events = sum(sum(histogram.values) for histogram in histograms) status = {'station_count': n_stations, 'n_events': number_of_events} @@ -274,25 +255,26 @@ def get_context_data(self, **kwargs): plots = {histogram.type.slug: plot_histogram(histogram) for histogram 
in self.object.network_histograms.all()} # data for singles plots - singles_datasets = (MultiDailyDataset.objects - .filter(summary__date=date, - summary__station__pcs__is_test=False, - type__slug='singlesratelow') - .distinct()) + singles_datasets = MultiDailyDataset.objects.filter( + summary__date=date, summary__station__pcs__is_test=False, type__slug='singlesratelow' + ).distinct() singles_plots = [(dataset.summary.station.number, plot_dataset(dataset)) for dataset in singles_datasets] singles_plots = sorted(singles_plots) - context.update({'date': date, - 'tomorrow': date + datetime.timedelta(days=1), - 'status': status, - - 'plots': plots, - 'singles_plots': singles_plots, - 'thismonth': thismonth, - 'month_list': month_list, - 'year_list': year_list, - 'prev': prev, - 'next': next}) + context.update( + { + 'date': date, + 'tomorrow': date + datetime.timedelta(days=1), + 'status': status, + 'plots': plots, + 'singles_plots': singles_plots, + 'thismonth': thismonth, + 'month_list': month_list, + 'year_list': year_list, + 'prev': prev, + 'next': next, + } + ) return context def nav_calendar(self): @@ -331,9 +313,9 @@ def nav_months(self): month_list = [{'month': month} for month in calendar.month_abbr[1:]] for date in date_list: - first_of_month = (self.get_queryset() - .filter(date__year=date.year, date__month=date.month) - .earliest().get_absolute_url()) + first_of_month = ( + self.get_queryset().filter(date__year=date.year, date__month=date.month).earliest().get_absolute_url() + ) month_list[date.month - 1]['link'] = first_of_month return month_list @@ -378,8 +360,16 @@ def get_object(self, queryset=None): queryset = ( self.get_queryset() .select_related('station') - .prefetch_related('histograms', 'histograms__type', 'multi_histograms', 'multi_histograms__type', - 'datasets', 'datasets__type', 'multi_datasets', 'multi_datasets__type') + .prefetch_related( + 'histograms', + 'histograms__type', + 'multi_histograms', + 'multi_histograms__type', + 'datasets', + 'datasets__type', + 'multi_datasets', + 'multi_datasets__type', + ) ) date = self.kwargs['date'] @@ -402,8 +392,7 @@ def get_context_data(self, **kwargs): # Find previous/next dates with data try: - previous = (self.get_queryset().filter(station=station, date__lt=date) - .latest().get_absolute_url()) + previous = self.get_queryset().filter(station=station, date__lt=date).latest().get_absolute_url() except Summary.DoesNotExist: previous = None try: @@ -413,10 +402,7 @@ def get_context_data(self, **kwargs): # Get most recent configuration try: - summary = (Summary.objects - .with_config() - .filter(station=station, date__lte=date) - .latest()) + summary = Summary.objects.with_config().filter(station=station, date__lte=date).latest() config = Configuration.objects.filter(summary=summary).latest() if config.slave == -1: has_slave = False @@ -442,28 +428,31 @@ def get_context_data(self, **kwargs): # Data for the plots plots = {histogram.type.slug: plot_histogram(histogram) for histogram in self.object.histograms.all()} - plots.update({histogram.type.slug: plot_histogram(histogram) for histogram in self.object.multi_histograms.all()}) + plots.update( + {histogram.type.slug: plot_histogram(histogram) for histogram in self.object.multi_histograms.all()} + ) plots.update({dataset.type.slug: plot_dataset(dataset) for dataset in self.object.datasets.all()}) plots.update({dataset.type.slug: plot_dataset(dataset) for dataset in self.object.multi_datasets.all()}) - context.update({'station': station, - 'date': date, - 'tomorrow': date + 
datetime.timedelta(days=1), - 'config': config, - 'location': location, - 'has_slave': has_slave, - - 'plots': plots, - - 'thismonth': thismonth, - 'month_list': month_list, - 'year_list': year_list, - 'previous': previous, - 'next': next, - - 'has_data': True, - 'has_config': has_config, - 'coincidences_found': coincidences_found}) + context.update( + { + 'station': station, + 'date': date, + 'tomorrow': date + datetime.timedelta(days=1), + 'config': config, + 'location': location, + 'has_slave': has_slave, + 'plots': plots, + 'thismonth': thismonth, + 'month_list': month_list, + 'year_list': year_list, + 'previous': previous, + 'next': next, + 'has_data': True, + 'has_config': has_config, + 'coincidences_found': coincidences_found, + } + ) return context def nav_calendar(self): @@ -472,9 +461,11 @@ def nav_calendar(self): date = self.object.date month = calendar.Calendar().monthdatescalendar(date.year, date.month) - days_with_data = self.get_queryset().filter(station=self.object.station, - date__year=date.year, - date__month=date.month) + days_with_data = self.get_queryset().filter( + station=self.object.station, + date__year=date.year, + date__month=date.month, + ) days_with_data = {day.date: day.get_absolute_url() for day in days_with_data} weeks = [] @@ -496,16 +487,20 @@ def nav_calendar(self): def nav_months(self): """Create list of months with links""" - months_with_data = (self.get_queryset().filter(station=self.object.station, - date__year=self.object.date.year) - .dates('date', 'month')) + months_with_data = ( + self.get_queryset() + .filter(station=self.object.station, date__year=self.object.date.year) + .dates('date', 'month') + ) month_list = [{'month': month} for month in calendar.month_abbr[1:]] for date in months_with_data: - first_of_month = (self.get_queryset().filter(station=self.object.station, - date__year=date.year, - date__month=date.month) - .earliest().get_absolute_url()) + first_of_month = ( + self.get_queryset() + .filter(station=self.object.station, date__year=date.year, date__month=date.month) + .earliest() + .get_absolute_url() + ) month_list[date.month - 1]['link'] = first_of_month return month_list @@ -519,8 +514,12 @@ def nav_years(self): year_list = [] for year in range(years_with_data[0], years_with_data[-1] + 1): if year in years_with_data: - first_of_year = (self.get_queryset().filter(station=self.object.station, date__year=year) - .earliest().get_absolute_url()) + first_of_year = ( + self.get_queryset() + .filter(station=self.object.station, date__year=year) + .earliest() + .get_absolute_url() + ) year_list.append({'year': year, 'link': first_of_year}) else: year_list.append({'year': year, 'link': None}) @@ -532,11 +531,9 @@ class LatestSummaryRedirectView(RedirectView): def get_redirect_url(self, *args, **kwargs): try: - return (Summary.objects - .with_data() - .filter(station__number=kwargs['station_number']) - .latest() - .get_absolute_url()) + return ( + Summary.objects.with_data().filter(station__number=kwargs['station_number']).latest().get_absolute_url() + ) except Summary.DoesNotExist: return None @@ -552,12 +549,17 @@ def station_status(request, station_number): station_status = DataStatus() status = station_status.get_status(station_number) - return render(request, 'status_display/station_status.html', - {'station': station, - 'has_data': has_data, - 'has_config': has_config, - 'status': status, - 'coincidences_found': True}) + return render( + request, + 'status_display/station_status.html', + { + 'station': station, + 'has_data': has_data, 
+ 'has_config': has_config, + 'status': status, + 'coincidences_found': True, + }, + ) def station_config(request, station_number): @@ -566,10 +568,12 @@ def station_config(request, station_number): today = datetime.date.today() station = get_object_or_404(Station, number=station_number) - configs = get_list_or_404(Configuration.objects.order_by('timestamp'), - summary__station=station, - timestamp__gte=FIRSTDATE, - timestamp__lte=today) + configs = get_list_or_404( + Configuration.objects.order_by('timestamp'), + summary__station=station, + timestamp__gte=FIRSTDATE, + timestamp__lte=today, + ) has_data = station_has_data(station) @@ -592,26 +596,27 @@ def station_config(request, station_number): timingoffsetgraph = plot_timing_offsets(station.number) altitudegraph = plot_config('altitude', configs) gpstrack = set(gpslocations) - layout = (StationLayout.objects - .filter(station=station, - active_date__gte=FIRSTDATE, - active_date__lte=today) - .last()) - - return render(request, 'status_display/station_config.html', - {'station': station, - 'config': config, - 'lla': lla, - 'voltagegraph': voltagegraph, - 'currentgraph': currentgraph, - 'timingoffsetgraph': timingoffsetgraph, - 'altitudegraph': altitudegraph, - 'gpstrack': gpstrack, - 'layout': layout, - 'has_slave': has_slave, - 'has_data': has_data, - 'has_config': True, - 'coincidences_found': True}) + layout = StationLayout.objects.filter(station=station, active_date__gte=FIRSTDATE, active_date__lte=today).last() + + return render( + request, + 'status_display/station_config.html', + { + 'station': station, + 'config': config, + 'lla': lla, + 'voltagegraph': voltagegraph, + 'currentgraph': currentgraph, + 'timingoffsetgraph': timingoffsetgraph, + 'altitudegraph': altitudegraph, + 'gpstrack': gpstrack, + 'layout': layout, + 'has_slave': has_slave, + 'has_data': has_data, + 'has_config': True, + 'coincidences_found': True, + }, + ) def station_latest(request, station_number): @@ -622,30 +627,30 @@ def station_latest(request, station_number): station = get_object_or_404(Station, number=station_number) try: - summary = (Summary.objects - .get(num_events__isnull=False, - station=station, - date=yesterday)) + summary = Summary.objects.get(num_events__isnull=False, station=station, date=yesterday) except Summary.DoesNotExist: # Do something nice, get older data old_data = True - summary = (Summary.objects - .valid_date() - .filter(num_events__isnull=False, - station=station) - .latest()) + summary = Summary.objects.valid_date().filter(num_events__isnull=False, station=station).latest() station_status = DataStatus() status = station_status.get_status(station.number) date = summary.date - plots = {histogram.type.slug: plot_histogram(histogram) - for histogram in summary.histograms.filter(type__slug='eventtime')} - plots.update({histogram.type.slug: plot_histogram(histogram) - for histogram in summary.multi_histograms.filter(type__slug__in=['pulseheight', 'pulseintegral'])}) - plots.update({dataset.type.slug: plot_dataset(dataset) - for dataset in summary.datasets.filter(type__slug='barometer')}) + plots = { + histogram.type.slug: plot_histogram(histogram) + for histogram in summary.histograms.filter(type__slug='eventtime') + } + plots.update( + { + histogram.type.slug: plot_histogram(histogram) + for histogram in summary.multi_histograms.filter(type__slug__in=['pulseheight', 'pulseintegral']) + } + ) + plots.update( + {dataset.type.slug: plot_dataset(dataset) for dataset in summary.datasets.filter(type__slug='barometer')} + ) # Show 
alternative extra_station = None @@ -662,13 +667,18 @@ def station_latest(request, station_number): except IndexError: pass - return render(request, 'status_display/station_latest.html', - {'station': station, - 'date': date, - 'status': status, - 'plots': plots, - 'extra_station': extra_station, - 'old_data': old_data}) + return render( + request, + 'status_display/station_latest.html', + { + 'station': station, + 'date': date, + 'status': status, + 'plots': plots, + 'extra_station': extra_station, + 'old_data': old_data, + }, + ) def get_specific_network_histogram_source(request, date, type): @@ -708,22 +718,24 @@ def get_eventtime_source(request, station_number, start=None, end=None): if end is None: try: - last = (Summary.objects - .valid_date() - .filter(station__number=station_number, num_events__isnull=False) - .latest() - .date) + last = ( + Summary.objects.valid_date() + .filter(station__number=station_number, num_events__isnull=False) + .latest() + .date + ) except Summary.DoesNotExist: raise Http404 end = last + datetime.timedelta(days=1) if start is None: # Get first date with data try: - start = (Summary.objects - .valid_date() - .filter(station__number=station_number, date__lt=end, num_events__isnull=False) - .earliest() - .date) + start = ( + Summary.objects.valid_date() + .filter(station__number=station_number, date__lt=end, num_events__isnull=False) + .earliest() + .date + ) except Summary.DoesNotExist: raise Http404 @@ -745,9 +757,9 @@ def get_eventtime_source(request, station_number, start=None, end=None): }, content_type=MIME_TSV, ) - response['Content-Disposition'] = ( - f'attachment; filename=eventtime-s{station_number}-{start:%Y%-m%-d}-{end:%Y%-m%-d}.tsv' - ) + response[ + 'Content-Disposition' + ] = f'attachment; filename=eventtime-s{station_number}-{start:%Y%-m%-d}-{end:%Y%-m%-d}.tsv' return response @@ -755,8 +767,10 @@ def get_eventtime_histogram_sources(station_number, start, end): histograms = get_list_or_404( DailyHistogram.objects.select_related('summary'), summary__station__number=station_number, - summary__date__gte=start, summary__date__lt=end, - type__slug='eventtime') + summary__date__gte=start, + summary__date__lt=end, + type__slug='eventtime', + ) bins = [] values = [] hours = arange(24) * 3600 @@ -781,11 +795,7 @@ def get_specific_dataset_source(request, station_number, date, type): response = render( request, f'source/{type}_dataset.tsv', - { - 'data': data, - 'date': date, - 'station_number': station_number - }, + {'data': data, 'date': date, 'station_number': station_number}, content_type=MIME_TSV, ) response['Content-Disposition'] = f'attachment; filename={type}-s{station_number}-{date:%Y%-m%-d}.tsv' @@ -797,10 +807,7 @@ def get_specific_config_source(request, station_number, type): response = render( request, f'source/{type}_config.tsv', - { - 'data': data, - 'station_number': station_number - }, + {'data': data, 'station_number': station_number}, content_type=MIME_TSV, ) response['Content-Disposition'] = f'attachment; filename={type}-s{station_number}.tsv' @@ -808,20 +815,23 @@ def get_specific_config_source(request, station_number, type): def get_station_layout_source(request, station_number): - layouts = (StationLayout.objects - .filter(station__number=station_number, - active_date__gte=FIRSTDATE, - active_date__lte=datetime.date.today())) + layouts = StationLayout.objects.filter( + station__number=station_number, + active_date__gte=FIRSTDATE, + active_date__lte=datetime.date.today(), + ) if not layouts: raise Http404 for layout in layouts: 
layout.timestamp = calendar.timegm(layout.active_date.utctimetuple()) - response = render(request, 'source/station_layout.tsv', - {'layouts': layouts, - 'station_number': station_number}, - content_type=MIME_TSV) + response = render( + request, + 'source/station_layout.tsv', + {'layouts': layouts, 'station_number': station_number}, + content_type=MIME_TSV, + ) response['Content-Disposition'] = f'attachment; filename=station_layout-s{station_number}.tsv' return response @@ -833,18 +843,28 @@ def get_detector_timing_offsets_source(request, station_number): data = [next(rows) for _, rows in groupby(data, key=itemgetter(1, 2, 3, 4))] - data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2]), none_to_nan(r[3]), none_to_nan(r[4])) - for r in data] + data = [ + ( + clock.datetime_to_gps(r[0]), + none_to_nan(r[1]), + none_to_nan(r[2]), + none_to_nan(r[3]), + none_to_nan(r[4]), + ) + for r in data + ] buffer = StringIO() writer = csv.writer(buffer, delimiter='\t', lineterminator='\n') writer.writerows(data) tsvdata = buffer.getvalue().strip('\n') - response = render(request, 'source/detector_timing_offsets.tsv', - {'tsvdata': tsvdata, - 'station_number': station_number}, - content_type=MIME_TSV) + response = render( + request, + 'source/detector_timing_offsets.tsv', + {'tsvdata': tsvdata, 'station_number': station_number}, + content_type=MIME_TSV, + ) response['Content-Disposition'] = f'attachment; filename=detector_timing_offsets-s{station_number}.tsv' return response @@ -868,22 +888,22 @@ def get_station_timing_offsets_source(request, ref_station_number, station_numbe data = [next(rows) for _, rows in groupby(data, key=itemgetter(1))] - data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2])) - for r in data] + data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2])) for r in data] buffer = StringIO() writer = csv.writer(buffer, delimiter='\t', lineterminator='\n') writer.writerows(data) tsvdata = buffer.getvalue().strip('\n') - response = render(request, 'source/station_timing_offsets.tsv', - {'tsvdata': tsvdata, - 'ref_station_number': ref_station_number, - 'station_number': station_number}, - content_type=MIME_TSV) - response['Content-Disposition'] = ( - f'attachment; filename=station_timing_offsets-s{ref_station_number}-s{station_number}.tsv' + response = render( + request, + 'source/station_timing_offsets.tsv', + {'tsvdata': tsvdata, 'ref_station_number': ref_station_number, 'station_number': station_number}, + content_type=MIME_TSV, ) + response[ + 'Content-Disposition' + ] = f'attachment; filename=station_timing_offsets-s{ref_station_number}-s{station_number}.tsv' return response @@ -906,10 +926,12 @@ def get_histogram_source(date, type, station_number=None): else: histogram_model = MultiDailyHistogram - histogram = get_object_or_404(histogram_model, - summary__station__number=station_number, - summary__date=date, - type__slug=type) + histogram = get_object_or_404( + histogram_model, + summary__station__number=station_number, + summary__date=date, + type__slug=type, + ) if type in ['eventtime', 'zenith', 'azimuth', 'coincidencetime', 'coincidencenumber']: return list(zip(histogram.bins, histogram.values)) @@ -932,10 +954,12 @@ def get_dataset_source(date, type, station_number): else: dataset_model = MultiDailyDataset - dataset = get_object_or_404(dataset_model, - summary__station__number=int(station_number), - summary__date=date, - type__slug=type) + dataset = get_object_or_404( + dataset_model, + 
summary__station__number=int(station_number), + summary__date=date, + type__slug=type, + ) if type in ['barometer', 'temperature']: return list(zip(dataset.x, dataset.y)) @@ -973,19 +997,18 @@ def get_config_source(station_number, type): else: return None - configs = (Configuration.objects - .filter(summary__station__number=station_number, - timestamp__gte=FIRSTDATE, - timestamp__lte=datetime.date.today()) - .order_by('timestamp')) + configs = Configuration.objects.filter( + summary__station__number=station_number, + timestamp__gte=FIRSTDATE, + timestamp__lte=datetime.date.today(), + ).order_by('timestamp') if not configs: raise Http404 if type == 'electronics': data = [ - (config.timestamp, config.master, config.slave, config.master_fpga, config.slave_fpga) - for config in configs + (config.timestamp, config.master, config.slave, config.master_fpga, config.slave_fpga) for config in configs ] else: data = list(configs.values_list(*fields)) @@ -1011,17 +1034,21 @@ def plot_config(type, configs): x_label = 'Date (month/year)' if type == 'voltage': - values = [[config.mas_ch1_voltage, config.mas_ch2_voltage, config.slv_ch1_voltage, config.slv_ch2_voltage] - for config in configs] + values = [ + [config.mas_ch1_voltage, config.mas_ch2_voltage, config.slv_ch1_voltage, config.slv_ch2_voltage] + for config in configs + ] values = list(zip(*values)) y_label = 'PMT Voltage (V)' elif type == 'current': - values = [[config.mas_ch1_current, config.mas_ch2_current, config.slv_ch1_current, config.slv_ch2_current] - for config in configs] + values = [ + [config.mas_ch1_current, config.mas_ch2_current, config.slv_ch1_current, config.slv_ch2_current] + for config in configs + ] values = list(zip(*values)) y_label = 'PMT Current (mA)' if type == 'altitude': - values = [config.gps_altitude for config in configs if config.gps_altitude != 0.] + values = [config.gps_altitude for config in configs if config.gps_altitude != 0.0] if not len(values): return None y_label = 'Altitude (m)' @@ -1053,7 +1080,8 @@ def get_detector_timing_offsets(station_number): offsets = DetectorTimingOffset.objects.filter( summary__station__number=station_number, summary__date__gte=FIRSTDATE, - summary__date__lte=datetime.date.today()) + summary__date__lte=datetime.date.today(), + ) data = offsets.values_list('summary__date', 'offset_1', 'offset_2', 'offset_3', 'offset_4') return data @@ -1070,7 +1098,8 @@ def get_station_timing_offsets(ref_station_number, station_number): ref_summary__station__number=ref_station_number, summary__station__number=station_number, summary__date__gte=FIRSTDATE, - summary__date__lte=datetime.date.today()) + summary__date__lte=datetime.date.today(), + ) data = offsets.values_list('summary__date', 'offset', 'error') return data @@ -1079,17 +1108,18 @@ def get_station_timing_offsets(ref_station_number, station_number): def get_gpslocations(configs): """Get all valid GPS locations from the configs""" - gps = [(config.gps_latitude, config.gps_longitude, config.gps_altitude) - for config in configs - if config.gps_latitude != 0. and config.gps_longitude != 0.] 
+ gps = [ + (config.gps_latitude, config.gps_longitude, config.gps_altitude) + for config in configs + if config.gps_latitude != 0.0 and config.gps_longitude != 0.0 + ] return gps def create_plot_object(x_values, y_series, x_label, y_label): if type(y_series[0]) not in [list, tuple]: y_series = [y_series] - data = [[[xv, yv] for xv, yv in zip(x_values, y_values) if yv is not None] - for y_values in y_series] + data = [[[xv, yv] for xv, yv in zip(x_values, y_values) if yv is not None] for y_values in y_series] plot_object = {'data': data, 'x_label': x_label, 'y_label': y_label} return plot_object @@ -1102,12 +1132,11 @@ def stations_with_data(): weather or shower, between 2004 and now. """ - return (Station.objects - .filter(Q(summaries__num_events__isnull=False) - | Q(summaries__num_weather__isnull=False), - summaries__date__gte=FIRSTDATE, - summaries__date__lte=datetime.date.today()) - .distinct()) + return Station.objects.filter( + Q(summaries__num_events__isnull=False) | Q(summaries__num_weather__isnull=False), + summaries__date__gte=FIRSTDATE, + summaries__date__lte=datetime.date.today(), + ).distinct() def station_has_config(station): diff --git a/publicdb/urls.py b/publicdb/urls.py index c2c2b6ea..506b74f6 100644 --- a/publicdb/urls.py +++ b/publicdb/urls.py @@ -6,9 +6,7 @@ urlpatterns = [ path('', RedirectView.as_view(url='show/stations', permanent=False)), - path('robots.txt', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')), - path('api/', include('publicdb.api.urls')), path('show/', include('publicdb.status_display.urls')), path('maps/', include('publicdb.maps.urls')), @@ -17,10 +15,7 @@ path('software-updates/', include('publicdb.updates.urls')), path('raw_data/', include('publicdb.raw_data.urls', namespace='raw_data')), path('data/', include('publicdb.raw_data.urls')), - path('config/datastore', create_datastore_config, name='datatore_config'), - path('keys/<host>/', keys, name='keys'), - path('admin/', admin.site.urls), ] diff --git a/pyproject.toml b/pyproject.toml index 5c46a1d0..89bd1d2a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,10 @@ [tool.black] target-version = ['py310'] -line-length = 110 +line-length = 120 skip-string-normalization = true +extend-exclude = ''' +( + /migrations/ + | /templates/ +) +''' diff --git a/scripts/contactlist.py b/scripts/contactlist.py index 625edc21..c15b24cb 100644 --- a/scripts/contactlist.py +++ b/scripts/contactlist.py @@ -8,6 +8,7 @@ os.environ['DJANGO_SETTINGS_MODULE'] = 'publicdb.settings' import django + django.setup() from publicdb.inforecords.models import * diff --git a/scripts/download_test_datastore.py b/scripts/download_test_datastore.py index ad54e3ec..7b55cdc4 100644 --- a/scripts/download_test_datastore.py +++ b/scripts/download_test_datastore.py @@ -27,6 +27,7 @@ from sapphire.publicdb import download_data import django + django.setup() from django.conf import settings @@ -45,7 +46,7 @@ def main(): def test_for_datastore_directory(): - print("Checking for datastore path at {datastore_path} ...",) + print(f"Checking for datastore path at {datastore_path} ...", end=' ') if not os.path.exists(datastore_path): raise RuntimeError("Datastore path cannot be found!") else: @@ -82,9 +83,7 @@ def download_and_store_station_data(f, station, date, get_blobs=True): cluster = station.cluster.main_cluster() station_group = get_or_create_station_group(f, cluster, station.number) - download_data(f, station_group, station.number, - start, end, - get_blobs=get_blobs) + download_data(f, station_group, 
         station.number, start, end, get_blobs=get_blobs)


 def open_or_create_file(data_dir, date):
@@ -124,8 +123,7 @@ def get_or_create_cluster_group(file, cluster):
     try:
         cluster = file.get_node(hisparc, node_name)
     except tables.NoSuchNodeError:
-        cluster = file.create_group(hisparc, node_name,
-                                    f'HiSPARC cluster {cluster} data')
+        cluster = file.create_group(hisparc, node_name, f'HiSPARC cluster {cluster} data')
     file.flush()

     return cluster
@@ -144,8 +142,7 @@ def get_or_create_station_group(file, cluster, station_number):
     try:
         station = file.get_node(cluster, node_name)
     except tables.NoSuchNodeError:
-        station = file.create_group(cluster, node_name,
-                                    f'HiSPARC station {station_number} data')
+        station = file.create_group(cluster, node_name, f'HiSPARC station {station_number} data')
     file.flush()

     return station
diff --git a/scripts/vpn-xmlrpc-client.py b/scripts/vpn-xmlrpc-client.py
index c6eb5538..8573e0f8 100644
--- a/scripts/vpn-xmlrpc-client.py
+++ b/scripts/vpn-xmlrpc-client.py
@@ -10,10 +10,7 @@ vpn_server = ServerProxy('http://localhost:8001')
 print(vpn_server.system.listMethods())
 print(vpn_server.create_key('sciencepark501', 'client', '192.168.0.1'))
-print(vpn_server.register_hosts_ip([
-    ('nikhef1', '192.168.0.1'),
-    ('nikhef2', '192.168.0.2')
-]))
+print(vpn_server.register_hosts_ip([('nikhef1', '192.168.0.1'), ('nikhef2', '192.168.0.2')]))

 zip = base64.b64decode(s.get_key('sciencepark501', 'client'))
 with open('/tmp/test.zip', 'w') as file:
     file.write(zip)
diff --git a/setup.cfg b/setup.cfg
index b1b6a918..dad80209 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -12,7 +12,7 @@ exclude =

 [isort]
 profile = black
-line_length = 110
+line_length = 120
 known_django = django
 known_hisparc =

diff --git a/tests/factories/providers.py b/tests/factories/providers.py
index 1d0fd492..df68ad4d 100644
--- a/tests/factories/providers.py
+++ b/tests/factories/providers.py
@@ -13,7 +13,7 @@ def make_urlsafe(value):
 class DataProvider(BaseProvider):
     """Provider for histogram and dataset data"""

-    def float(self, min=0., max=100., ndigits=6):
+    def float(self, min=0.0, max=100.0, ndigits=6):
         return round(self.generator.random.uniform(a=min, b=max), ndigits)

     def int_list(self, n=24, **kwargs):
diff --git a/tests/test_analysissessions/test_management.py b/tests/test_analysissessions/test_management.py
index 2422bbc7..ac68a855 100644
--- a/tests/test_analysissessions/test_management.py
+++ b/tests/test_analysissessions/test_management.py
@@ -14,15 +14,25 @@ def setUp(self):
         cluster = ClusterFactory(number=0, country__number=0)

         # Newly created
-        self.new = SessionRequestFactory(session_confirmed=False, session_pending=True, session_created=False, cluster=cluster)
+        self.new = SessionRequestFactory(
+            session_confirmed=False, session_pending=True, session_created=False, cluster=cluster
+        )
         # Email confirmed
-        self.confirmed = SessionRequestFactory(session_confirmed=True, session_pending=True, session_created=False, cluster=cluster)
+        self.confirmed = SessionRequestFactory(
+            session_confirmed=True, session_pending=True, session_created=False, cluster=cluster
+        )
         # Selected for create session
-        self.selected = SessionRequestFactory(session_confirmed=False, session_pending=True, session_created=False, cluster=cluster)
+        self.selected = SessionRequestFactory(
+            session_confirmed=False, session_pending=True, session_created=False, cluster=cluster
+        )
         # Create session started
-        self.started = SessionRequestFactory(session_confirmed=False, session_pending=False, session_created=False, cluster=cluster)
+        self.started = SessionRequestFactory(
+            session_confirmed=False, session_pending=False, session_created=False, cluster=cluster
+        )
         # Session created
-        self.created = SessionRequestFactory(session_confirmed=False, session_pending=False, session_created=True, cluster=cluster)
+        self.created = SessionRequestFactory(
+            session_confirmed=False, session_pending=False, session_created=True, cluster=cluster
+        )

     @patch('publicdb.analysissessions.models.SessionRequest.create_session')
     def test_createsessions(self, mock_create_session):
diff --git a/tests/test_analysissessions/test_models.py b/tests/test_analysissessions/test_models.py
index e18588fb..de38d858 100644
--- a/tests/test_analysissessions/test_models.py
+++ b/tests/test_analysissessions/test_models.py
@@ -25,18 +25,22 @@ def test_no_longer_in_progress(self):
         analysis_session = AnalysisSessionFactory(
             session_request__cluster=self.cluster,
             starts=datetime.datetime.utcnow() + datetime.timedelta(days=3),
-            ends=datetime.datetime.utcnow() + datetime.timedelta(days=7))
+            ends=datetime.datetime.utcnow() + datetime.timedelta(days=7),
+        )
         self.assertFalse(analysis_session.in_progress())

     def test_not_yet_in_progress(self):
         analysis_session = AnalysisSessionFactory(
             session_request__cluster=self.cluster,
             starts=datetime.datetime.utcnow() - datetime.timedelta(days=7),
-            ends=datetime.datetime.utcnow() - datetime.timedelta(days=3))
+            ends=datetime.datetime.utcnow() - datetime.timedelta(days=3),
+        )
         self.assertFalse(analysis_session.in_progress())

     def test_hash(self):
-        self.assertEqual(hashlib.md5(self.analysis_session.slug.encode('utf-8')).hexdigest(), self.analysis_session.hash)
+        self.assertEqual(
+            hashlib.md5(self.analysis_session.slug.encode('utf-8')).hexdigest(), self.analysis_session.hash
+        )

     def test_str(self):
         self.assertEqual(self.analysis_session.title, str(self.analysis_session))
@@ -67,7 +71,8 @@ def setUp(self):
     def test_str(self):
         self.assertEqual(
             f'{self.analyzed_coincidence.coincidence} - {self.analyzed_coincidence.student}',
-            str(self.analyzed_coincidence))
+            str(self.analyzed_coincidence),
+        )


 class TestSessionRequest(TestCase):
diff --git a/tests/test_histograms/test_checks.py b/tests/test_histograms/test_checks.py
index f36cdf31..9df972ea 100644
--- a/tests/test_histograms/test_checks.py
+++ b/tests/test_histograms/test_checks.py
@@ -9,7 +9,6 @@


 class TestChecks(TestCase):
-
     @patch('publicdb.histograms.checks.check_for_new_events_and_update_flags')
     def test_check_for_updates(self, mock_flags):
         """The check function is called if previous check has finished"""
@@ -44,7 +43,7 @@ def test_check_for_new_events_and_update_flags_creates_summaries(self, mock_even
                     'satellites': 0,
                     'singles_old': 86400,
                     'config': 1,
-                    'events': 168
+                    'events': 168,
                 }
             }
         }
@@ -80,17 +79,22 @@ def test_check_for_new_events_and_update_flags_updates_existing_summary(self, mo
                     'satellites': 0,
                     'singles_old': 86400,
                     'config': 1,
-                    'events': 168
+                    'events': 168,
                 }
             }
         }
         self.setup_station()
         summary = histograms_factories.SummaryFactory(
-            station=self.station, date=date(2017, 1, 1),
-            needs_update_events=False, num_events=100,
-            needs_update_weather=False, num_weather=None,
-            needs_update_config=False, num_config=None,
-            needs_update_singles=False, num_singles=10,
+            station=self.station,
+            date=date(2017, 1, 1),
+            needs_update_events=False,
+            num_events=100,
+            needs_update_weather=False,
+            num_weather=None,
+            needs_update_config=False,
+            num_config=None,
+            needs_update_singles=False,
+            num_singles=10,
         )

         state = Mock(check_last_run=datetime(2004, 1, 1, 1, 0, 0))
diff --git a/tests/test_histograms/test_datastore.py b/tests/test_histograms/test_datastore.py
index 98ccba40..aba9c248 100644
--- a/tests/test_histograms/test_datastore.py
+++ b/tests/test_histograms/test_datastore.py
@@ -27,11 +27,12 @@ def test_check_for_new_events(self):
                         'satellites': 0,
                         'singles_old': 86400,
                         'config': 1,
-                        'events': 168
+                        'events': 168,
                     }
                 }
             },
-            event_summary)
+            event_summary,
+        )

     def test_get_stations(self):
         """Get all stations with a node in the test data"""
@@ -41,7 +42,10 @@ def test_get_stations(self):

     def test_get_data_path(self):
         """Get all stations with a node in the test data"""
-        self.assertEqual(join(settings.DATASTORE_PATH, '2017/1/2017_1_1.h5'), datastore.get_data_path(datetime.date(2017, 1, 1)))
+        self.assertEqual(
+            join(settings.DATASTORE_PATH, '2017/1/2017_1_1.h5'),
+            datastore.get_data_path(datetime.date(2017, 1, 1)),
+        )

     def test_get_config_messages(self):
         """Get all stations with a node in the test data"""
diff --git a/tests/test_histograms/test_esd.py b/tests/test_histograms/test_esd.py
index bd52bc20..7d87871b 100644
--- a/tests/test_histograms/test_esd.py
+++ b/tests/test_histograms/test_esd.py
@@ -11,16 +11,20 @@

 @override_settings(ESD_PATH=join(dirname(__file__), '../data/esd'))
 class TestESD(TestCase):
-
     def setup_station(self):
         cluster = inforecords_factories.ClusterFactory(name='Amsterdam', number=0, country__number=0)
         self.station = inforecords_factories.StationFactory(number=501, cluster=cluster)
         self.summary = histograms_factories.SummaryFactory(
-            station=self.station, date=datetime.date(2017, 1, 1),
-            needs_update_events=False, num_events=168,
-            needs_update_weather=False, num_weather=60,
-            needs_update_config=False, num_config=1,
-            needs_update_singles=False, num_singles=301,
+            station=self.station,
+            date=datetime.date(2017, 1, 1),
+            needs_update_events=False,
+            num_events=168,
+            needs_update_weather=False,
+            num_weather=60,
+            needs_update_config=False,
+            num_config=1,
+            needs_update_singles=False,
+            num_singles=301,
             needs_update=False,
         )

diff --git a/tests/test_histograms/test_jobs.py b/tests/test_histograms/test_jobs.py
index 7176f8de..d9226a58 100644
--- a/tests/test_histograms/test_jobs.py
+++ b/tests/test_histograms/test_jobs.py
@@ -56,11 +56,16 @@ def test_perform_update_tasks(self):
         self.setup_station()
         summary = histograms_factories.SummaryFactory(
-            station=self.station, date=date(2017, 1, 1),
-            needs_update_events=True, num_events=168,
-            needs_update_weather=True, num_weather=60,
-            needs_update_config=True, num_config=None,
-            needs_update_singles=True, num_singles=301,
+            station=self.station,
+            date=date(2017, 1, 1),
+            needs_update_events=True,
+            num_events=168,
+            needs_update_weather=True,
+            num_weather=60,
+            needs_update_config=True,
+            num_config=None,
+            needs_update_singles=True,
+            num_singles=301,
             needs_update=True,
         )

diff --git a/tests/test_histograms/test_models.py b/tests/test_histograms/test_models.py
index 0c66a0cc..243c1a99 100644
--- a/tests/test_histograms/test_models.py
+++ b/tests/test_histograms/test_models.py
@@ -84,7 +84,8 @@ def setUp(self):
         self.station = inforecords_factories.StationFactory(number=9, cluster__number=0, cluster__country__number=0)
         self.summary = histograms_factories.SummaryFactory(station=self.station, date=date(2016, 1, 12))
         self.configuration = histograms_factories.ConfigurationFactory(
-            summary=self.summary, timestamp=datetime.combine(self.summary.date, time(10, 11, 20)))
+            summary=self.summary, timestamp=datetime.combine(self.summary.date, time(10, 11, 20))
+        )

     def test_str(self):
         self.assertEqual('9 - 2016-01-12 10:11:20', str(self.configuration))
@@ -144,19 +145,27 @@ def setUp(self):
         self.summary_date = histograms_factories.SummaryFactory(station=self.station, date=date(2010, 2, 13))

     def test_clean(self):
-        offset = histograms_factories.StationTimingOffsetFactory.build(ref_summary=self.ref_summary, summary=self.summary)
+        offset = histograms_factories.StationTimingOffsetFactory.build(
+            ref_summary=self.ref_summary, summary=self.summary
+        )
         offset.clean()

     def test_clean_same_station(self):
-        offset = histograms_factories.StationTimingOffsetFactory.build(ref_summary=self.ref_summary, summary=self.ref_summary)
+        offset = histograms_factories.StationTimingOffsetFactory.build(
+            ref_summary=self.ref_summary, summary=self.ref_summary
+        )
         with self.assertRaisesMessage(ValidationError, 'stations'):
             offset.clean()

-        offset = histograms_factories.StationTimingOffsetFactory.build(ref_summary=self.ref_summary, summary=self.ref_summary_date)
+        offset = histograms_factories.StationTimingOffsetFactory.build(
+            ref_summary=self.ref_summary, summary=self.ref_summary_date
+        )
         with self.assertRaisesMessage(ValidationError, 'stations'):
             offset.clean()

     def test_clean_different_date(self):
-        offset = histograms_factories.StationTimingOffsetFactory.build(ref_summary=self.ref_summary, summary=self.summary_date)
+        offset = histograms_factories.StationTimingOffsetFactory.build(
+            ref_summary=self.ref_summary, summary=self.summary_date
+        )
         with self.assertRaisesMessage(ValidationError, 'summary dates'):
             offset.clean()
diff --git a/tests/test_inforecords/test_models.py b/tests/test_inforecords/test_models.py
index 9ae4c56f..977df258 100644
--- a/tests/test_inforecords/test_models.py
+++ b/tests/test_inforecords/test_models.py
@@ -21,8 +21,9 @@ def test_type_with_contact(self):

     def test_type_with_station(self):
         contact_info = inforecords_factories.ContactInformationFactory()
-        inforecords_factories.StationFactory(number=1, cluster__number=0, cluster__country__number=0,
-                                             contactinformation=contact_info)
+        inforecords_factories.StationFactory(
+            number=1, cluster__number=0, cluster__country__number=0, contactinformation=contact_info
+        )
         self.assertEqual('Station', contact_info.type)

     def test_contact_owner(self):
@@ -36,15 +37,17 @@ def test_contact_owner_with_contact(self):

     def test_contact_owner_with_station(self):
         contact_info = inforecords_factories.ContactInformationFactory()
-        station = inforecords_factories.StationFactory(number=1, cluster__number=0, cluster__country__number=0,
-                                                       contactinformation=contact_info)
+        station = inforecords_factories.StationFactory(
+            number=1, cluster__number=0, cluster__country__number=0, contactinformation=contact_info
+        )
         self.assertEqual(str(station), contact_info.contact_owner)

     def test_contact_owner_with_contact_and_station(self):
         contact_info = inforecords_factories.ContactInformationFactory()
         contact = inforecords_factories.ContactFactory(contactinformation=contact_info)
-        station = inforecords_factories.StationFactory(number=1, cluster__number=0, cluster__country__number=0,
-                                                       contactinformation=contact_info)
+        station = inforecords_factories.StationFactory(
+            number=1, cluster__number=0, cluster__country__number=0, contactinformation=contact_info
+        )
         self.assertEqual(f'{contact}, {station}', contact_info.contact_owner)

     def test_contact_owner_with_multiple_contacts(self):
@@ -56,8 +59,8 @@ def test_contact_owner_with_multiple_contacts(self):
     def test_str(self):
         contact_info = inforecords_factories.ContactInformationFactory()
         self.assertEqual(
-            ' '.join([contact_info.city, contact_info.street_1, contact_info.email_work]),
-            str(contact_info))
+            ' '.join([contact_info.city, contact_info.street_1, contact_info.email_work]), str(contact_info)
+        )


 class TestContact(TestCase):
diff --git a/tests/test_raw_data/test_date_generator.py b/tests/test_raw_data/test_date_generator.py
index fdf58c28..e49f573a 100644
--- a/tests/test_raw_data/test_date_generator.py
+++ b/tests/test_raw_data/test_date_generator.py
@@ -23,11 +23,9 @@ def test_stop_day_after_start(self):
     def test_stop_days_after_start(self):
         stop = self.start + timedelta(days=3)
         self.assertEqual(
-            [self.start,
-             self.start + timedelta(days=1),
-             self.start + timedelta(days=2),
-             stop],
-            list(daterange(self.start, stop)))
+            [self.start, self.start + timedelta(days=1), self.start + timedelta(days=2), stop],
+            list(daterange(self.start, stop)),
+        )


 class TestSingleDayRanges(SimpleTestCase):
@@ -35,32 +33,25 @@ def setUp(self):
         self.start = datetime(2010, 3, 1, 5, 20, 13)

     def test_stop_is_start(self):
-        self.assertEqual(
-            [(self.start, self.start)],
-            list(single_day_ranges(self.start, self.start))
-        )
+        self.assertEqual([(self.start, self.start)], list(single_day_ranges(self.start, self.start)))

     def test_stop_before_start(self):
         stop = self.start - timedelta(days=7)
-        self.assertEqual(
-            [(self.start, stop)],
-            list(single_day_ranges(self.start, stop))
-        )
+        self.assertEqual([(self.start, stop)], list(single_day_ranges(self.start, stop)))

     def test_stop_end_of_same_day_as_start(self):
         stop = self.start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
-        self.assertEqual(
-            [(self.start, stop)],
-            list(single_day_ranges(self.start, stop))
-        )
+        self.assertEqual([(self.start, stop)], list(single_day_ranges(self.start, stop)))

     def test_stop_days_after_start(self):
         stop = self.start + timedelta(days=3)
         midnight = self.start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
         self.assertEqual(
-            [(self.start, midnight),
-             (midnight, midnight + timedelta(days=1)),
-             (midnight + timedelta(days=1), midnight + timedelta(days=2)),
-             (midnight + timedelta(days=2), stop)],
-            list(single_day_ranges(self.start, stop))
+            [
+                (self.start, midnight),
+                (midnight, midnight + timedelta(days=1)),
+                (midnight + timedelta(days=1), midnight + timedelta(days=2)),
+                (midnight + timedelta(days=2), stop),
+            ],
+            list(single_day_ranges(self.start, stop)),
         )
diff --git a/tests/test_raw_data/test_forms.py b/tests/test_raw_data/test_forms.py
index 9a915d70..74df1e06 100644
--- a/tests/test_raw_data/test_forms.py
+++ b/tests/test_raw_data/test_forms.py
@@ -8,25 +8,44 @@


 class TestDataDownloadForm(TestCase):
-
     def setUp(self):
         # Required models
         cluster = inforecords_factories.ClusterFactory(name='Amsterdam', number=0, country__number=0)
         self.station = inforecords_factories.StationFactory(name='Nikhef', number=501, cluster=cluster)
         self.summary = histograms_factories.SummaryFactory(
-            station=self.station, date=datetime.date(2017, 1, 1),
-            needs_update_events=False, num_events=168,
-            needs_update_weather=False, num_weather=60,
-            needs_update_config=False, num_config=1,
-            needs_update_singles=False, num_singles=301,
+            station=self.station,
+            date=datetime.date(2017, 1, 1),
+            needs_update_events=False,
+            num_events=168,
+            needs_update_weather=False,
+            num_weather=60,
+            needs_update_config=False,
+            num_config=1,
+            needs_update_singles=False,
+            num_singles=301,
             needs_update=False,
         )

     def test_clean_valid(self):
         valid_form_data = [
-            {'data_type': 'events', 'station_events': self.station.id, 'start': '2017-1-1', 'end': '2017-1-2'},
-            {'data_type': 'weather', 'station_weather': self.station.id, 'start': '2017-1-1', 'end': '2017-1-2'},
-            {'data_type': 'singles', 'station_singles': self.station.id, 'start': '2017-1-1', 'end': '2017-1-2'},
+            {
+                'data_type': 'events',
+                'station_events': self.station.id,
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+            },
+            {
+                'data_type': 'weather',
+                'station_weather': self.station.id,
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+            },
+            {
+                'data_type': 'singles',
+                'station_singles': self.station.id,
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+            },
             {'data_type': 'lightning', 'lightning_type': 0, 'start': '2014-10-1', 'end': '2014-11-4'},
         ]
         for data in valid_form_data:
@@ -35,8 +54,18 @@ def test_clean_valid(self):

     def test_clean_invalid(self):
         invalid_form_data = [
-            {'data_type': 'events', 'station_weather': self.station.id, 'start': '2017-1-1', 'end': '2017-1-2'},
-            {'data_type': 'weather', 'station_events': self.station.id, 'start': '2017-1-1', 'end': '2017-1-2'},
+            {
+                'data_type': 'events',
+                'station_weather': self.station.id,
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+            },
+            {
+                'data_type': 'weather',
+                'station_events': self.station.id,
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+            },
             {'data_type': 'lightning', 'lightning_type': 10, 'start': '2014-10-1', 'end': '2014-11-4'},
             {'data_type': 'events', 'station_events': self.station.id},
         ]
@@ -46,18 +75,22 @@


 class TestCoincidenceDownloadForm(TestCase):
-
     def setUp(self):
         # Required models
         self.cluster = inforecords_factories.ClusterFactory(name='Amsterdam', number=0, country__number=0)
         self.station = inforecords_factories.StationFactory(name='Nikhef', number=501, cluster=self.cluster)
         self.station2 = inforecords_factories.StationFactory(name='Nikhef2', number=502, cluster=self.cluster)
         self.summary = histograms_factories.SummaryFactory(
-            station=self.station, date=datetime.date(2017, 1, 1),
-            needs_update_events=False, num_events=168,
-            needs_update_weather=False, num_weather=60,
-            needs_update_config=False, num_config=1,
-            needs_update_singles=False, num_singles=301,
+            station=self.station,
+            date=datetime.date(2017, 1, 1),
+            needs_update_events=False,
+            num_events=168,
+            needs_update_weather=False,
+            num_weather=60,
+            needs_update_config=False,
+            num_config=1,
+            needs_update_singles=False,
+            num_singles=301,
             needs_update=False,
         )

@@ -65,8 +98,20 @@ def test_clean_valid(self):
         valid_form_data = [
             {'filter_by': 'network', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
             {'filter_by': 'stations', 'stations': '501,502', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
-            {'filter_by': 'stations', 'stations': '[502, 501]', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
-            {'filter_by': 'cluster', 'cluster': self.cluster.id, 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
+            {
+                'filter_by': 'stations',
+                'stations': '[502, 501]',
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+                'n': 2,
+            },
+            {
+                'filter_by': 'cluster',
+                'cluster': self.cluster.id,
+                'start': '2017-1-1',
+                'end': '2017-1-2',
+                'n': 2,
+            },
         ]
         for data in valid_form_data:
             form = CoincidenceDownloadForm(data)
@@ -74,13 +119,65 @@ def test_clean_valid(self):

     def test_clean_invalid(self):
         invalid_form_data = [
-            ('cluster', 'invalid_choice', {'filter_by': 'cluster', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
-            ('stations', 'required', {'filter_by': 'stations', 'stations': '', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
-            ('stations', 'incorrect_entry', {'filter_by': 'stations', 'stations': '501;102', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
-            ('stations', 'too_few', {'filter_by': 'stations', 'stations': '501', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
-            ('stations', 'too_many', {'filter_by': 'stations', 'stations': (',501' * 32)[1:], 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
-            ('stations', 'invalid_choices', {'filter_by': 'stations', 'stations': '501,501', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
-            ('stations', 'invalid_choices', {'filter_by': 'stations', 'stations': '501,100001', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2}),
+            (
+                'cluster',
+                'invalid_choice',
+                {'filter_by': 'cluster', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
+            ),
+            (
+                'stations',
+                'required',
+                {'filter_by': 'stations', 'stations': '', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
+            ),
+            (
+                'stations',
+                'incorrect_entry',
+                {
+                    'filter_by': 'stations',
+                    'stations': '501;102',
+                    'start': '2017-1-1',
+                    'end': '2017-1-2',
+                    'n': 2,
+                },
+            ),
+            (
+                'stations',
+                'too_few',
+                {'filter_by': 'stations', 'stations': '501', 'start': '2017-1-1', 'end': '2017-1-2', 'n': 2},
+            ),
+            (
+                'stations',
+                'too_many',
+                {
+                    'filter_by': 'stations',
+                    'stations': (',501' * 32)[1:],
+                    'start': '2017-1-1',
+                    'end': '2017-1-2',
+                    'n': 2,
+                },
+            ),
+            (
+                'stations',
+                'invalid_choices',
+                {
+                    'filter_by': 'stations',
+                    'stations': '501,501',
+                    'start': '2017-1-1',
+                    'end': '2017-1-2',
+                    'n': 2,
+                },
+            ),
+            (
+                'stations',
+                'invalid_choices',
+                {
+                    'filter_by': 'stations',
+                    'stations': '501,100001',
+                    'start': '2017-1-1',
+                    'end': '2017-1-2',
+                    'n': 2,
+                },
+            ),
         ]
         for field, error_code, data in invalid_form_data:
             form = CoincidenceDownloadForm(data)
diff --git a/tests/test_raw_data/test_views.py b/tests/test_raw_data/test_views.py
index 71b5742f..3f7df743 100644
--- a/tests/test_raw_data/test_views.py
+++ b/tests/test_raw_data/test_views.py
@@ -10,17 +10,21 @@

 @override_settings(ESD_PATH=join(dirname(__file__), '../data/esd'))
 class TestDownload(TestCase):
-
     def setUp(self):
         # Required models
         cluster = inforecords_factories.ClusterFactory(name='Amsterdam', number=0, country__number=0)
         self.station = inforecords_factories.StationFactory(name='Nikhef', number=501, cluster=cluster)
         self.summary = histograms_factories.SummaryFactory(
-            station=self.station, date=datetime.date(2017, 1, 1),
-            needs_update_events=False, num_events=168,
-            needs_update_weather=False, num_weather=60,
-            needs_update_config=False, num_config=1,
-            needs_update_singles=False, num_singles=301,
+            station=self.station,
+            date=datetime.date(2017, 1, 1),
+            needs_update_events=False,
+            num_events=168,
+            needs_update_weather=False,
+            num_weather=60,
+            needs_update_config=False,
+            num_config=1,
+            needs_update_singles=False,
+            num_singles=301,
             needs_update=False,
         )

diff --git a/tests/test_station_layout/test_models.py b/tests/test_station_layout/test_models.py
index de3a3c86..f2d6cb9b 100644
--- a/tests/test_station_layout/test_models.py
+++ b/tests/test_station_layout/test_models.py
@@ -25,6 +25,6 @@ def test_does_not_have_four_detectors(self):
             detector_4_radius=None,
             detector_4_alpha=None,
             detector_4_height=None,
-            detector_4_beta=None
+            detector_4_beta=None,
         )
         self.assertFalse(layout.has_four_detectors)
diff --git a/tests/test_status_display/test_views.py b/tests/test_status_display/test_views.py
index 951789f9..c3e7ae4e 100644
--- a/tests/test_status_display/test_views.py
+++ b/tests/test_status_display/test_views.py
@@ -93,7 +93,7 @@ def assert_context_contains(self, expected_context, context):
     def test_network_histograms(self):
         factories = [
             histograms_factories.CoincidencetimeHistogramFactory,
-            histograms_factories.CoincidencenumberHistogramFactory
+            histograms_factories.CoincidencenumberHistogramFactory,
         ]
         for factory in factories:
             with self.subTest(factory=factory):
@@ -109,7 +109,7 @@ def test_daily_histograms(self):
         factories = [
             histograms_factories.EventtimeHistogramFactory,
             histograms_factories.AzimuthHistogramFactory,
-            histograms_factories.ZenithHistogramFactory
+            histograms_factories.ZenithHistogramFactory,
         ]
         for factory in factories:
             with self.subTest(factory=factory):
@@ -127,7 +127,7 @@ def test_multi_daily_histograms(self):
             histograms_factories.PulseintegralHistogramFactory,
             histograms_factories.PulseheightHistogramFactory,
             histograms_factories.SingleslowHistogramFactory,
-            histograms_factories.SingleshighHistogramFactory
+            histograms_factories.SingleshighHistogramFactory,
         ]
         for factory in factories:
             with self.subTest(factory=factory):
@@ -207,7 +207,7 @@ def test_station_offsets(self):

                 kwargs = {
                     'ref_station_number': min(other_station.number, self.station.number),
-                    'station_number': max(other_station.number, self.station.number)
+                    'station_number': max(other_station.number, self.station.number),
                 }
                 self.get_tsv(reverse('status:source:station_offsets', kwargs=kwargs))
diff --git a/tests/test_updates/test_views.py b/tests/test_updates/test_views.py
index f95d2eb7..5f4c4214 100644
--- a/tests/test_updates/test_views.py
+++ b/tests/test_updates/test_views.py
@@ -37,9 +37,7 @@ def test_check_querystring_admin_update(self):
         self.assertEqual(200, response.status_code)

         data = parse_qs(response.content.decode('utf-8'))
-        self.assertEqual(
-            {'mustUpdate': ['2'], 'newVersionAdmin': ['2'], 'urlAdmin': [admin_update.update.url]},
-            data)
+        self.assertEqual({'mustUpdate': ['2'], 'newVersionAdmin': ['2'], 'urlAdmin': [admin_update.update.url]}, data)

     def test_check_querystring_user_update(self):
         kwargs = {'queue': 'hisparc'}
@@ -50,9 +48,7 @@ def test_check_querystring_user_update(self):
         self.assertEqual(200, response.status_code)

         data = parse_qs(response.content.decode('utf-8'))
-        self.assertEqual(
-            {'mustUpdate': ['1'], 'newVersionUser': ['2'], 'urlUser': [user_update.update.url]},
-            data)
+        self.assertEqual({'mustUpdate': ['1'], 'newVersionUser': ['2'], 'urlUser': [user_update.update.url]}, data)

     def test_check_querystring_admin_and_user_update(self):
         kwargs = {'queue': 'hisparc'}
@@ -65,10 +61,15 @@ def test_check_querystring_admin_and_user_update(self):

         data = parse_qs(response.content.decode('utf-8'))
         self.assertEqual(
-            {'mustUpdate': ['3'],
-             'newVersionUser': ['2'], 'urlUser': [user_update.update.url],
-             'newVersionAdmin': ['2'], 'urlAdmin': [admin_update.update.url]},
-            data)
+            {
+                'mustUpdate': ['3'],
+                'newVersionUser': ['2'],
+                'urlUser': [user_update.update.url],
+                'newVersionAdmin': ['2'],
+                'urlAdmin': [admin_update.update.url],
+            },
+            data,
+        )

     def test_check_querystring_missing_versions(self):
         kwargs = {'queue': 'hisparc'}