From a7f317c297e4d7c1e5d36c2c1fa367b123f7d07f Mon Sep 17 00:00:00 2001 From: glass-ships Date: Mon, 5 Aug 2024 15:36:06 -0400 Subject: [PATCH 01/28] Add expiration field to DataRun model --- .gitignore | 4 + README.md | 87 ++++++++++++------ docs/Makefile | 4 +- docs/conf.py | 8 +- docs/developer/config_for_local_use.rst | 81 ++++++++++------- src/live_data_server/__init__.py | 0 src/live_data_server/config.py | 4 + src/live_data_server/plots/admin.py | 1 - src/live_data_server/plots/models.py | 38 ++++---- src/live_data_server/plots/view_util.py | 25 ++++-- src/live_data_server/plots/views.py | 29 ++++-- tests/conftest.py | 2 - tests/test_expiration.py | 114 ++++++++++++++++++++++++ tests/test_post_get.py | 100 +++++++++++---------- 14 files changed, 349 insertions(+), 148 deletions(-) create mode 100644 src/live_data_server/__init__.py create mode 100644 src/live_data_server/config.py create mode 100644 tests/test_expiration.py diff --git a/.gitignore b/.gitignore index 1dbc687..000bd40 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +/.envrc +/docker-compose.yml +_version.py + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/README.md b/README.md index fb6b97e..cb0ab6d 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,25 @@ -## live_data_server -Data server for data plots +# live_data_server + [![Documentation Status](https://readthedocs.org/projects/livedata-ornl/badge/?version=latest)](https://livedata-ornl.readthedocs.io/en/latest/?badge=latest) [![codecov](https://codecov.io/gh/neutrons/live_data_server/graph/badge.svg?token=niQ0AWldBd)](https://codecov.io/gh/neutrons/live_data_server) +Data server for data plots. + Developer documentation at https://livedata-ornl.readthedocs.io/en/latest/ -## Contributing +## Development + +### Dependencies + +- [Conda](https://conda.io/projects/conda/en/latest/user-guide/install/index.html) / [Mamba]() +- [Docker](https://docs.docker.com/engine/install/) and [Docker Compose](https://docs.docker.com/compose/install/) +- [direnv](https://direnv.net/) (optional) + +### Setup for Local Development + +Clone the repository and `cd` into the project directory. Create a conda environment `livedata`, containing all the dependencies ```python @@ -15,56 +27,75 @@ conda env create -f environment.yml conda activate livedata ``` -### Containerization +To deploy this application locally, you will need to set a number of environment variables, +for example (bash): -To deploy this application locally for development you will need to assign values to the following secrets -as environment variables defined in the shell's environment: ```bash - DATABASE_NAME - DATABASE_USER - DATABASE_PASS - DATABASE_HOST - DATABASE_PORT - LIVE_PLOT_SECRET_KEY +export DATABASE_NAME=livedatadb +export DATABASE_USER=livedatauser +export DATABASE_PASS=livedatapass +export DATABASE_HOST=db +export DATABASE_PORT=5432 +export LIVE_PLOT_SECRET_KEY="secretKey" + +# These need to be set for `pytest`, +# but are not used in the docker compose +export DJANGO_SUPERUSER_USERNAME=$DATABASE_USER +export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS ``` -It is recommended to save these variables into an `.envrc` file which can be managed by -[envdir](https://direnv.net/). + +**Notes**: + +- The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default. + If you need to change the port, you will need to modify the `docker-compose.yml` file accordingly. 
+ +- It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/). + direnv will automatically load the variables when you `cd` into the project directory. After the secrets are set, type in the terminal shell: + ```bash make local/docker/up ``` -This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing -all the services. -Type `make help` to learn about other macros available as make targets. -For instance, `make docker/pruneall` will stop all containers, then remove -all containers, images, networks, and volumes. +This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing all the services. + +Type `make help` to learn about other macros available as make targets. +For instance, `make docker/pruneall` will stop all containers, then remove all containers, images, networks, and volumes. -## Test & Verification +### Testing -After starting the services with `make local/docker/up`, run the test that will post and get data: +After the setup, with the server running, you can test your setup by running `pytest`: ```bash -DJANGO_SUPERUSER_USERNAME=***** DJANGO_SUPERUSER_PASSWORD=***** pytest tests/test_post_get.py +pytest tests/test_post_get.py +# or simply +pytest ``` -Environment variables `DJANGO_SUPERUSER_USERNAME` and `DJANGO_SUPERUSER_PASSWORD` are defined in -file `docker-compose.envlocal.yml`. You need to either pass these variables with the correct values or have -them exported to the shell where `pytest` is to be run. +**NOTE:** +The environment variables `DJANGO_SUPERUSER_USERNAME` and `DJANGO_SUPERUSER_PASSWORD` are defined in the `docker-compose.envlocal.yml` file, but `pytest` does not read this file. +You must either have them exported to the shell where `pytest` is to be run, as described above, or modify the `pytest` command to include them, e.g.: + +```bash +DJANGO_SUPERUSER_USERNAME=***** DJANGO_SUPERUSER_PASSWORD=***** pytest +``` ## Deployment to the Test Environment + - Repository managing the provision for deployment: - + hardware and networking for deployment: https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/terraform/servers.tf#L85-97 - + configuration independent of source code changes: https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/ansible/testfixture02-test.yaml + - hardware and networking for deployment: https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/terraform/servers.tf#L85-97 + - configuration independent of source code changes: https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/ansible/testfixture02-test.yaml - Repository managing deployment of the source to the provisioned hardware: https://code.ornl.gov/sns-hfir-scse/deployments/livedata-deploy - ## Building the Documentation + Additional documentation is available in the `docs` directory. To build the documentation in your local machine, run the following command from within directory `docs/`: + ```bash make html ``` + The documentation will be built in the `docs/_build/html` directory. To view the documentation, open the `docs/_build/html/index.html` file in a web browser. diff --git a/docs/Makefile b/docs/Makefile index 5117fbf..bedb955 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -10,11 +10,11 @@ BUILDDIR = _build # Put it first so that "make" without argument is like "make help". 
help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + @$(SPHINXBUILD) -M help $(SOURCEDIR) $(BUILDDIR) $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + $(SPHINXBUILD) -M $@ $(SOURCEDIR) $(BUILDDIR) $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py index 6de6959..3ed893f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -4,10 +4,12 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html import os import sys +from importlib import metadata -sys.path.insert(0, os.path.abspath("../live_data_server")) +# sys.path.insert(0, os.path.abspath("../live_data_server")) +# from live_data_server import __version__ as release -from live_data_server import __version__ as release +release = metadata.version("live_data_server") # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information @@ -67,7 +69,7 @@ autosummary_generate = True # Napoleon settings -napoleon_google_docstring = False +napoleon_google_docstring = True napoleon_numpy_docstring = True # Add any paths that contain custom static files (such as style sheets) here, diff --git a/docs/developer/config_for_local_use.rst b/docs/developer/config_for_local_use.rst index bf9a9bd..32d311e 100644 --- a/docs/developer/config_for_local_use.rst +++ b/docs/developer/config_for_local_use.rst @@ -2,58 +2,79 @@ Configure for Local Debugging and Development ============================================= -If you just follow the steps in the readme then you will be able to start the service -but will not be able to interact with the api at all beyond receiving 400 errors. +Clone the repository and ``cd`` into the project directory. -In order to enable the api you will need to tweak some config settings. -(Maybe in the future it would be worth including these as dev versions) +Create a conda environment ``livedata``, containing all the dependencies -docker-compose.yml ------------------- +.. code-block:: python -.. code-block:: yaml + conda env create -f environment.yml + conda activate livedata - # replace this - image: live_data:dev - # with this - build: - network: host - context: . +To deploy this application locally, you will need to set a number of environment variables, +for example (bash): +.. code-block:: bash -This will build from our local source instead of pulling an image online. + export DATABASE_NAME=livedatadb + export DATABASE_USER=livedatauser + export DATABASE_PASS=livedatapass + export DATABASE_HOST=db + export DATABASE_PORT=5432 + export LIVE_PLOT_SECRET_KEY="secretKey" + # These need to be set for `pytest`, + # but are not used in the docker compose + export DJANGO_SUPERUSER_USERNAME=$DATABASE_USER + export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS -Settings.py ------------ -.. code-block:: python +*NOTES*: - # replace this - ALLOWED_HOSTS = ['livedata.sns.gov'] - # with this - ALLOWED_HOSTS = ['*'] +- The ``DATABASE_PORT`` **must** be set to ``5432``, as Postgres is configured to listen on that port by default. + If you need to change the port, you will need to modify the ``docker-compose.yml`` file accordingly. +- It is recommended to save these variables into an ``.envrc`` file which can be managed by `direnv `_. 
+ direnv will automatically load the variables when you ``cd`` into the project directory. + +After the secrets are set, you can start the server with: + +.. code-block:: bash + make local/docker/up -This setting is meant for production where its actually hosted on livedata.sns.gov. -Changing it to a wildcard lets us ping it as local host and not get a 400 error. +This command will copy ``config/docker-compose.envlocal.yml`` into ``./docker-compose.yml`` before composing all the services. + +| Run ``make help`` to learn about other macros available as make targets. +| For instance, ``make docker/pruneall`` will stop all containers, then remove all containers, images, networks, and volumes. + +Testing +------- + +After the setup, with the server running, you can test your setup with ``pytest``: + +.. code-block:: bash + # run all tests + pytest + # or run a specific test + pytest tests/test_post_get.py -You should now be able to interact with the api on `localhost:9999` but there's a little more. -You need to add a user that you can use for your post requests, +*NOTE:* +The environment variables ``DJANGO_SUPERUSER_USERNAME`` and ``DJANGO_SUPERUSER_PASSWORD`` are defined in the ``docker-compose.envlocal.yml`` file, but ``pytest`` does not read this file. +You must either have them exported to the shell where ``pytest`` is to be run, as described above, or modify the ``pytest`` command to include them, e.g.: .. code-block:: bash - docker exec -it live_data_server_livedata_1 /bin/bash - cd live_data_server - python manage.py createsuperuser + DJANGO_SUPERUSER_USERNAME=***** DJANGO_SUPERUSER_PASSWORD=***** pytest +API +--- I personally recommend using `Postman `_ when interacting with the api. -If you do, set the request body to `form-data`! +If you do, set the request body to ``form-data``! -Some relevant form-data field keys: +Some relevant form-data field keys: #. file #. username diff --git a/src/live_data_server/__init__.py b/src/live_data_server/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/live_data_server/config.py b/src/live_data_server/config.py new file mode 100644 index 0000000..76e1ec5 --- /dev/null +++ b/src/live_data_server/config.py @@ -0,0 +1,4 @@ +from datetime import timedelta + +# Default expiration time for runs/data (3 years) +DEFAULT_EXPIRATION_TIME = timedelta(days=365 * 3) diff --git a/src/live_data_server/plots/admin.py b/src/live_data_server/plots/admin.py index a1e54b9..07accd3 100644 --- a/src/live_data_server/plots/admin.py +++ b/src/live_data_server/plots/admin.py @@ -1,5 +1,4 @@ from django.contrib import admin - from plots.models import DataRun, Instrument, PlotData diff --git a/src/live_data_server/plots/models.py b/src/live_data_server/plots/models.py index 8a53c8c..d5f2d7f 100644 --- a/src/live_data_server/plots/models.py +++ b/src/live_data_server/plots/models.py @@ -1,20 +1,19 @@ """ Plot data models """ - import logging import sys +from datetime import timedelta from django.db import models +from django.utils import timezone DATA_TYPES = {"json": 0, "html": 1, "div": 1} DATA_TYPE_INFO = {0: {"name": "json"}, 1: {"name": "html"}} class Instrument(models.Model): - """ - Table of instruments - """ + """Table of instruments""" name = models.CharField(max_length=128, unique=True) run_id_type = models.IntegerField(default=0) @@ -24,25 +23,30 @@ def __str__(self): class DataRun(models.Model): - """ - Table of runs + """Table of runs. + + A run is a collection of plots that are all related to a single data set. 
+ + Attributes: + run_number (int): Run number + run_id (str): Optional run identifier + instrument (Instrument): Instrument object + created_on (datetime): Timestamp + expiration_date (datetime): Expiration date """ run_number = models.IntegerField() - # Optional free-form run identifier run_id = models.TextField() - instrument = models.ForeignKey(Instrument, on_delete=models.deletion.CASCADE) created_on = models.DateTimeField("Timestamp", auto_now_add=True) + expiration_date = models.DateTimeField("Expires", default=timezone.now() + timedelta(days=(365 * 3))) def __str__(self): return f"{self.instrument}_{self.run_number}_{self.run_id}" class PlotData(models.Model): - """ - Table of plot data. This data can either be json or html - """ + """Table of plot data. This data can either be json or html""" ## DataRun this run status belongs to data_run = models.ForeignKey(DataRun, on_delete=models.deletion.CASCADE) @@ -60,8 +64,8 @@ def __str__(self): return str(self.data_run) def is_data_type_valid(self, data_type): - """ - Verify that a given data type matches the stored data + """Verify that a given data type matches the stored data + @param data_type: data type to check """ try: @@ -73,8 +77,8 @@ def is_data_type_valid(self, data_type): @classmethod def get_data_type_from_data(cls, data): - """ - Inspect the data to guess what type it is. + """Inspect the data to guess what type it is. + @param data: block of text to store """ if data.startswith(" Date: Mon, 5 Aug 2024 19:36:36 +0000 Subject: [PATCH 02/28] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- README.md | 12 ++++++------ docs/conf.py | 2 -- docs/developer/config_for_local_use.rst | 14 +++++++------- src/live_data_server/plots/models.py | 1 + src/live_data_server/plots/views.py | 5 ++--- tests/test_post_get.py | 1 - 6 files changed, 16 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index cb0ab6d..001db5b 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ conda env create -f environment.yml conda activate livedata ``` -To deploy this application locally, you will need to set a number of environment variables, +To deploy this application locally, you will need to set a number of environment variables, for example (bash): ```bash @@ -46,10 +46,10 @@ export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS **Notes**: -- The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default. +- The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default. If you need to change the port, you will need to modify the `docker-compose.yml` file accordingly. -- It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/). +- It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/). direnv will automatically load the variables when you `cd` into the project directory. After the secrets are set, type in the terminal shell: @@ -60,7 +60,7 @@ make local/docker/up This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing all the services. -Type `make help` to learn about other macros available as make targets. +Type `make help` to learn about other macros available as make targets. For instance, `make docker/pruneall` will stop all containers, then remove all containers, images, networks, and volumes. 
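A note on the `expiration_date` default that patch 01 adds to `DataRun`: the expression `default=timezone.now() + timedelta(days=(365 * 3))` is evaluated once, when the model class is imported, so every run created by a long-lived server process receives the same expiration timestamp. Django's documented remedy is to pass a callable instead. A minimal sketch of that fix; the helper name `default_expiration` is illustrative and not part of the patch:

```python
from datetime import timedelta

from django.db import models
from django.utils import timezone


def default_expiration():
    # A callable default is evaluated per row at save time,
    # not once at class-definition time.
    return timezone.now() + timedelta(days=365 * 3)


class DataRun(models.Model):
    # ... other DataRun fields as in patch 01 ...
    expiration_date = models.DateTimeField("Expires", default=default_expiration)
```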
### Testing @@ -73,8 +73,8 @@ pytest tests/test_post_get.py pytest ``` -**NOTE:** -The environment variables `DJANGO_SUPERUSER_USERNAME` and `DJANGO_SUPERUSER_PASSWORD` are defined in the `docker-compose.envlocal.yml` file, but `pytest` does not read this file. +**NOTE:** +The environment variables `DJANGO_SUPERUSER_USERNAME` and `DJANGO_SUPERUSER_PASSWORD` are defined in the `docker-compose.envlocal.yml` file, but `pytest` does not read this file. You must either have them exported to the shell where `pytest` is to be run, as described above, or modify the `pytest` command to include them, e.g.: ```bash diff --git a/docs/conf.py b/docs/conf.py index 3ed893f..1e6f2f8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -2,8 +2,6 @@ # # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html -import os -import sys from importlib import metadata # sys.path.insert(0, os.path.abspath("../live_data_server")) diff --git a/docs/developer/config_for_local_use.rst b/docs/developer/config_for_local_use.rst index 32d311e..29283de 100644 --- a/docs/developer/config_for_local_use.rst +++ b/docs/developer/config_for_local_use.rst @@ -11,7 +11,7 @@ Create a conda environment ``livedata``, containing all the dependencies conda env create -f environment.yml conda activate livedata -To deploy this application locally, you will need to set a number of environment variables, +To deploy this application locally, you will need to set a number of environment variables, for example (bash): .. code-block:: bash @@ -31,10 +31,10 @@ for example (bash): *NOTES*: -- The ``DATABASE_PORT`` **must** be set to ``5432``, as Postgres is configured to listen on that port by default. +- The ``DATABASE_PORT`` **must** be set to ``5432``, as Postgres is configured to listen on that port by default. If you need to change the port, you will need to modify the ``docker-compose.yml`` file accordingly. -- It is recommended to save these variables into an ``.envrc`` file which can be managed by `direnv `_. +- It is recommended to save these variables into an ``.envrc`` file which can be managed by `direnv `_. direnv will automatically load the variables when you ``cd`` into the project directory. After the secrets are set, you can start the server with: @@ -45,7 +45,7 @@ After the secrets are set, you can start the server with: This command will copy ``config/docker-compose.envlocal.yml`` into ``./docker-compose.yml`` before composing all the services. -| Run ``make help`` to learn about other macros available as make targets. +| Run ``make help`` to learn about other macros available as make targets. | For instance, ``make docker/pruneall`` will stop all containers, then remove all containers, images, networks, and volumes. Testing @@ -60,8 +60,8 @@ After the setup, with the server running, you can test your setup with ``pytest` # or run a specific test pytest tests/test_post_get.py -*NOTE:* -The environment variables ``DJANGO_SUPERUSER_USERNAME`` and ``DJANGO_SUPERUSER_PASSWORD`` are defined in the ``docker-compose.envlocal.yml`` file, but ``pytest`` does not read this file. +*NOTE:* +The environment variables ``DJANGO_SUPERUSER_USERNAME`` and ``DJANGO_SUPERUSER_PASSWORD`` are defined in the ``docker-compose.envlocal.yml`` file, but ``pytest`` does not read this file. You must either have them exported to the shell where ``pytest`` is to be run, as described above, or modify the ``pytest`` command to include them, e.g.: .. 
code-block:: bash @@ -74,7 +74,7 @@ API I personally recommend using `Postman `_ when interacting with the api. If you do, set the request body to ``form-data``! -Some relevant form-data field keys: +Some relevant form-data field keys: #. file #. username diff --git a/src/live_data_server/plots/models.py b/src/live_data_server/plots/models.py index d5f2d7f..0e9ed42 100644 --- a/src/live_data_server/plots/models.py +++ b/src/live_data_server/plots/models.py @@ -1,6 +1,7 @@ """ Plot data models """ + import logging import sys from datetime import timedelta diff --git a/src/live_data_server/plots/views.py b/src/live_data_server/plots/views.py index d31839a..2919839 100644 --- a/src/live_data_server/plots/views.py +++ b/src/live_data_server/plots/views.py @@ -2,9 +2,9 @@ Definition of views """ -from datetime import timedelta import json import logging +from datetime import timedelta from django.conf import settings from django.contrib.auth import authenticate, login @@ -21,8 +21,7 @@ # from plots.models import DataRun, Instrument, PlotData from .models import DataRun, Instrument, PlotData - -DEFAULT_EXPIRATION_TIME = 365 * 3 # 3 years +DEFAULT_EXPIRATION_TIME = 365 * 3 # 3 years def check_credentials(fn): diff --git a/tests/test_post_get.py b/tests/test_post_get.py index c78bcde..bf6dc2b 100644 --- a/tests/test_post_get.py +++ b/tests/test_post_get.py @@ -36,7 +36,6 @@ def setup_class(cls): conn.close() def test_post_request(self, data_server): - # load html plot as autoreduce service filename = "reflectivity.html" files = {"file": open(data_server.path_to(filename)).read()} From 0fbd74bef4891d0f3c9a060c0d025575366c3bf2 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 09:12:57 -0400 Subject: [PATCH 03/28] lint --- README.md | 12 ++++++------ docs/conf.py | 2 -- docs/developer/config_for_local_use.rst | 14 +++++++------- pyproject.toml | 2 ++ src/live_data_server/plots/models.py | 1 + src/live_data_server/plots/views.py | 5 ++--- tests/test_post_get.py | 1 - 7 files changed, 18 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index cb0ab6d..001db5b 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ conda env create -f environment.yml conda activate livedata ``` -To deploy this application locally, you will need to set a number of environment variables, +To deploy this application locally, you will need to set a number of environment variables, for example (bash): ```bash @@ -46,10 +46,10 @@ export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS **Notes**: -- The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default. +- The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default. If you need to change the port, you will need to modify the `docker-compose.yml` file accordingly. -- It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/). +- It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/). direnv will automatically load the variables when you `cd` into the project directory. After the secrets are set, type in the terminal shell: @@ -60,7 +60,7 @@ make local/docker/up This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing all the services. -Type `make help` to learn about other macros available as make targets. +Type `make help` to learn about other macros available as make targets. 
For instance, `make docker/pruneall` will stop all containers, then remove all containers, images, networks, and volumes. ### Testing @@ -73,8 +73,8 @@ pytest tests/test_post_get.py pytest ``` -**NOTE:** -The environment variables `DJANGO_SUPERUSER_USERNAME` and `DJANGO_SUPERUSER_PASSWORD` are defined in the `docker-compose.envlocal.yml` file, but `pytest` does not read this file. +**NOTE:** +The environment variables `DJANGO_SUPERUSER_USERNAME` and `DJANGO_SUPERUSER_PASSWORD` are defined in the `docker-compose.envlocal.yml` file, but `pytest` does not read this file. You must either have them exported to the shell where `pytest` is to be run, as described above, or modify the `pytest` command to include them, e.g.: ```bash diff --git a/docs/conf.py b/docs/conf.py index 3ed893f..1e6f2f8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -2,8 +2,6 @@ # # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html -import os -import sys from importlib import metadata # sys.path.insert(0, os.path.abspath("../live_data_server")) diff --git a/docs/developer/config_for_local_use.rst b/docs/developer/config_for_local_use.rst index 32d311e..29283de 100644 --- a/docs/developer/config_for_local_use.rst +++ b/docs/developer/config_for_local_use.rst @@ -11,7 +11,7 @@ Create a conda environment ``livedata``, containing all the dependencies conda env create -f environment.yml conda activate livedata -To deploy this application locally, you will need to set a number of environment variables, +To deploy this application locally, you will need to set a number of environment variables, for example (bash): .. code-block:: bash @@ -31,10 +31,10 @@ for example (bash): *NOTES*: -- The ``DATABASE_PORT`` **must** be set to ``5432``, as Postgres is configured to listen on that port by default. +- The ``DATABASE_PORT`` **must** be set to ``5432``, as Postgres is configured to listen on that port by default. If you need to change the port, you will need to modify the ``docker-compose.yml`` file accordingly. -- It is recommended to save these variables into an ``.envrc`` file which can be managed by `direnv `_. +- It is recommended to save these variables into an ``.envrc`` file which can be managed by `direnv `_. direnv will automatically load the variables when you ``cd`` into the project directory. After the secrets are set, you can start the server with: @@ -45,7 +45,7 @@ After the secrets are set, you can start the server with: This command will copy ``config/docker-compose.envlocal.yml`` into ``./docker-compose.yml`` before composing all the services. -| Run ``make help`` to learn about other macros available as make targets. +| Run ``make help`` to learn about other macros available as make targets. | For instance, ``make docker/pruneall`` will stop all containers, then remove all containers, images, networks, and volumes. Testing @@ -60,8 +60,8 @@ After the setup, with the server running, you can test your setup with ``pytest` # or run a specific test pytest tests/test_post_get.py -*NOTE:* -The environment variables ``DJANGO_SUPERUSER_USERNAME`` and ``DJANGO_SUPERUSER_PASSWORD`` are defined in the ``docker-compose.envlocal.yml`` file, but ``pytest`` does not read this file. +*NOTE:* +The environment variables ``DJANGO_SUPERUSER_USERNAME`` and ``DJANGO_SUPERUSER_PASSWORD`` are defined in the ``docker-compose.envlocal.yml`` file, but ``pytest`` does not read this file. 
You must either have them exported to the shell where ``pytest`` is to be run, as described above, or modify the ``pytest`` command to include them, e.g.: .. code-block:: bash @@ -74,7 +74,7 @@ API I personally recommend using `Postman `_ when interacting with the api. If you do, set the request body to ``form-data``! -Some relevant form-data field keys: +Some relevant form-data field keys: #. file #. username diff --git a/pyproject.toml b/pyproject.toml index a53bfa1..76868ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,8 @@ python_files = ["test*.py"] [tool.ruff] line-length = 120 + +[tool.ruff.lint] select = ["A", "ARG","ASYNC","BLE","C90", "E", "F", "I", "N", "UP032", "W"] # Add additional 3rd party tool configuration here as needed diff --git a/src/live_data_server/plots/models.py b/src/live_data_server/plots/models.py index d5f2d7f..0e9ed42 100644 --- a/src/live_data_server/plots/models.py +++ b/src/live_data_server/plots/models.py @@ -1,6 +1,7 @@ """ Plot data models """ + import logging import sys from datetime import timedelta diff --git a/src/live_data_server/plots/views.py b/src/live_data_server/plots/views.py index d31839a..2919839 100644 --- a/src/live_data_server/plots/views.py +++ b/src/live_data_server/plots/views.py @@ -2,9 +2,9 @@ Definition of views """ -from datetime import timedelta import json import logging +from datetime import timedelta from django.conf import settings from django.contrib.auth import authenticate, login @@ -21,8 +21,7 @@ # from plots.models import DataRun, Instrument, PlotData from .models import DataRun, Instrument, PlotData - -DEFAULT_EXPIRATION_TIME = 365 * 3 # 3 years +DEFAULT_EXPIRATION_TIME = 365 * 3 # 3 years def check_credentials(fn): diff --git a/tests/test_post_get.py b/tests/test_post_get.py index c78bcde..bf6dc2b 100644 --- a/tests/test_post_get.py +++ b/tests/test_post_get.py @@ -36,7 +36,6 @@ def setup_class(cls): conn.close() def test_post_request(self, data_server): - # load html plot as autoreduce service filename = "reflectivity.html" files = {"file": open(data_server.path_to(filename)).read()} From 9bc990e2314e036f68f34823dc688428616a9c31 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 12:55:12 -0400 Subject: [PATCH 04/28] makefile checks for correct docker compose command --- .github/dependabot.yml | 10 +++++++ .github/workflows/unittest.yml | 37 +++++++++++++++++++++---- .pre-commit-config.yaml | 2 +- Makefile | 16 ++++++----- README.md | 2 +- docs/developer/config_for_local_use.rst | 2 +- 6 files changed, 53 insertions(+), 16 deletions(-) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..dfd0e30 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +# Set update schedule for GitHub Actions + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every week + interval: "weekly" diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index d47a15f..36d5457 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -1,11 +1,11 @@ -name: unit-test +name: Run unit tests on: workflow_dispatch: pull_request: push: branches: [next, qa, main] - tags: ['v*'] + tags: ["v*"] jobs: linux: @@ -13,6 +13,7 @@ jobs: defaults: run: shell: bash -l {0} + env: DATABASE_NAME: livedatadb DATABASE_USER: livedatauser @@ -23,29 +24,51 @@ jobs: DJANGO_SUPERUSER_USERNAME: livedatauser 
DJANGO_SUPERUSER_PASSWORD: livedatapass COVERAGE_RUN: coverage run -m + steps: - - uses: actions/checkout@v4 - - uses: conda-incubator/setup-miniconda@v3 + - name: Checkout code + uses: actions/checkout@v4 + + - name: Cache conda + uses: actions/cache@v4 + env: + # Increase this value to reset cache if etc/example-environment.yml has not changed + CACHE_NUMBER: 0 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('etc/example-environment.yml') }} + + - name: Setup Conda environment + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true channels: conda-forge,defaults mamba-version: "*" environment-file: environment.yml - cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} - cache-downloads-key: ${{ runner.os }}-downloads-${{ hashFiles('**/environment.yml') }} + use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! + # These don't seem to be valid options for this action + # See: https://github.com/conda-incubator/setup-miniconda + # cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} + # cache-downloads-key: ${{ runner.os }}-downloads-${{ hashFiles('**/environment.yml') }} + - name: Start docker containers run: | cp ./config/docker-compose.envlocal.yml docker-compose.yml docker-compose up --build -d + - name: Sleep, wait for containers to start up run: sleep 30 + - name: Run unit tests run: python -m pytest tests/ + - name: Stop the coverage process # Stopping the coverage process allows the code coverage to be written to disk run: docker exec live_data_server_livedata_1 /bin/bash -c "pkill coverage" + - name: Copy code coverage out of docker container run: docker cp live_data_server_livedata_1:/var/www/livedata/app /tmp/ + - name: Combine and show code coverage shell: bash -l {0} run: | @@ -54,9 +77,11 @@ jobs: coverage xml cp coverage.xml $OLDPWD coverage report + - name: Bring down docker containers completely now # This will completely remove the containers run: docker-compose down + - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v4 with: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 96c2b83..6fb156b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.1 + rev: v0.5.6 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/Makefile b/Makefile index 29caf9c..49e9406 100644 --- a/Makefile +++ b/Makefile @@ -3,10 +3,12 @@ app_dir := live_data_server DJANGO_COMPATIBLE:=$(shell python -c "import django;t=0 if django.VERSION[0]<4 else 1; print(t)") DJANGO_VERSION:=$(shell python -c "import django;print(django.__version__)") -# command to run docker compose. 
change this to be what you have installed -# this can be overriden on the command line -# DOCKER_COMPOSE="docker compose" make docker/pruneall -DOCKER_COMPOSE ?= docker-compose +ifneq ($(shell docker compose version 2>/dev/null),) + DOCKER_COMPOSE=docker compose +else ifneq ($(shell docker-compose --version 2>/dev/null),) + DOCKER_COMPOSE=docker-compose +endif + help: # this nifty perl one-liner collects all comments headed by the double "#" symbols next to each target and recycles them as comments @@ -30,10 +32,10 @@ docker/pruneall: docker/compose/validate ## stop all containers, then remove al docker/compose/validate: ## validate the version of the docker-compose command. Exits quietly if valid. @./scripts/docker-compose_validate.sh $(DOCKER_COMPOSE) -local/docker/up: docker/compose/validate ## compose and start the service locally +docker/compose/local: docker/compose/validate ## compose and start the service locally \cp ./config/docker-compose.envlocal.yml docker-compose.yml $(DOCKER_COMPOSE) up --build - + .PHONY: check .PHONY: first_install .PHONY: help @@ -42,4 +44,4 @@ local/docker/up: docker/compose/validate ## compose and start the service locall .PHONY: webapp/core .PHONY: docker/compose/validate .PHONY: docker/pruneall -.PHONY: local/docker/up +.PHONY: docker/compose/local diff --git a/README.md b/README.md index 001db5b..c5eb77a 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS After the secrets are set, type in the terminal shell: ```bash -make local/docker/up +make docker/compose/local ``` This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing all the services. diff --git a/docs/developer/config_for_local_use.rst b/docs/developer/config_for_local_use.rst index 29283de..0fe12a6 100644 --- a/docs/developer/config_for_local_use.rst +++ b/docs/developer/config_for_local_use.rst @@ -41,7 +41,7 @@ After the secrets are set, you can start the server with: .. code-block:: bash - make local/docker/up + make docker/compose/local This command will copy ``config/docker-compose.envlocal.yml`` into ``./docker-compose.yml`` before composing all the services. From e2e8ede03766c2bba5d1646392b420609b39aa81 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 16:55:20 +0000 Subject: [PATCH 05/28] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 49e9406..831c368 100644 --- a/Makefile +++ b/Makefile @@ -35,7 +35,7 @@ docker/compose/validate: ## validate the version of the docker-compose command. 
docker/compose/local: docker/compose/validate ## compose and start the service locally \cp ./config/docker-compose.envlocal.yml docker-compose.yml $(DOCKER_COMPOSE) up --build - + .PHONY: check .PHONY: first_install .PHONY: help From 1fdc19e41fea1677b3d22b28e1dd96d72f720590 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 12:56:22 -0400 Subject: [PATCH 06/28] typo in path to environment.yml in github action --- .github/workflows/unittest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 36d5457..db2f5fc 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -36,7 +36,7 @@ jobs: CACHE_NUMBER: 0 with: path: ~/conda_pkgs_dir - key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('etc/example-environment.yml') }} + key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }} - name: Setup Conda environment uses: conda-incubator/setup-miniconda@v3 From 350372418487e0e63862ea63699dc05cf3199b08 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:09:07 -0400 Subject: [PATCH 07/28] try commenting use only tar bz2 --- .github/workflows/unittest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index db2f5fc..c9fac43 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -45,7 +45,7 @@ jobs: channels: conda-forge,defaults mamba-version: "*" environment-file: environment.yml - use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! + # use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! # These don't seem to be valid options for this action # See: https://github.com/conda-incubator/setup-miniconda # cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} From 14d5e915cf1c541d911b8699801739d848b899f2 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:14:29 -0400 Subject: [PATCH 08/28] try uncommenting use only tar bz2 --- .github/workflows/unittest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index c9fac43..db2f5fc 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -45,7 +45,7 @@ jobs: channels: conda-forge,defaults mamba-version: "*" environment-file: environment.yml - # use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! + use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! 
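# Context for the caching back-and-forth in patches 07-10: per setup-miniconda's
# own docs, use-only-tar-bz2 is required for actions/cache to restore conda
# packages, but it also makes packages published only in the newer .conda
# format unavailable, so enabling it can break the environment solve. That
# trade-off is consistent with the caching step being dropped again below.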
# These don't seem to be valid options for this action # See: https://github.com/conda-incubator/setup-miniconda # cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} From b8a1711059c819b6ff26546e4a9c6d6ab1b30df7 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:15:59 -0400 Subject: [PATCH 09/28] try setting cache_number to 1 --- .github/workflows/unittest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index db2f5fc..5a9a80a 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -32,8 +32,8 @@ jobs: - name: Cache conda uses: actions/cache@v4 env: - # Increase this value to reset cache if etc/example-environment.yml has not changed - CACHE_NUMBER: 0 + # Increase this value to reset cache if example-environment.yml has not changed + CACHE_NUMBER: 1 with: path: ~/conda_pkgs_dir key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }} From 283b6b8c35d119f5ba269820f36e4cc2eb13ae6a Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:17:47 -0400 Subject: [PATCH 10/28] remove caching step for now --- .github/workflows/unittest.yml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 5a9a80a..a1fbf57 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -29,15 +29,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Cache conda - uses: actions/cache@v4 - env: - # Increase this value to reset cache if example-environment.yml has not changed - CACHE_NUMBER: 1 - with: - path: ~/conda_pkgs_dir - key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }} - - name: Setup Conda environment uses: conda-incubator/setup-miniconda@v3 with: @@ -45,7 +36,6 @@ jobs: channels: conda-forge,defaults mamba-version: "*" environment-file: environment.yml - use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! 
# These don't seem to be valid options for this action # See: https://github.com/conda-incubator/setup-miniconda # cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} From 5bb56bf5b35d9f0b177af582348272386c727897 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:23:59 -0400 Subject: [PATCH 11/28] add seemingly unnecessary check for docker compose command --- .github/workflows/unittest.yml | 9 ++++++++- test.sh | 10 ++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 test.sh diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index a1fbf57..7ab686f 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -44,7 +44,14 @@ jobs: - name: Start docker containers run: | cp ./config/docker-compose.envlocal.yml docker-compose.yml - docker-compose up --build -d + if [[ $(docker compose version 2>/dev/null) != "" ]]; then + DOCKER_COMPOSE="docker compose" + elif [[ $(docker-compose version 2>/dev/null) != "" ]]; then + DOCKER_COMPOSE="docker-compose" + else + echo "docker compose or docker-compose is not installed" && exit 1 + fi + $DOCKER_COMPOSE up --build -d - name: Sleep, wait for containers to start up run: sleep 30 diff --git a/test.sh b/test.sh new file mode 100644 index 0000000..85606a9 --- /dev/null +++ b/test.sh @@ -0,0 +1,10 @@ +if [[ $(docker compose version 2>/dev/null) != "" ]]; then + DOCKER_COMPOSE="docker compose" +elif [[ $(docker-compose version 2>/dev/null) != "" ]]; then + DOCKER_COMPOSE="docker-compose" +else + echo "docker compose or docker-compose is not installed" + exit 1 +fi + +$DOCKER_COMPOSE version From ff713488952bdbb906782159a58d8c66c13c6e53 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:37:43 -0400 Subject: [PATCH 12/28] fix wrong docker image in coverage steps --- .github/workflows/unittest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 7ab686f..102a568 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -61,10 +61,10 @@ jobs: - name: Stop the coverage process # Stopping the coverage process allows the code coverage to be written to disk - run: docker exec live_data_server_livedata_1 /bin/bash -c "pkill coverage" + run: docker exec live_data_server-livedata1 /bin/bash -c "pkill coverage" - name: Copy code coverage out of docker container - run: docker cp live_data_server_livedata_1:/var/www/livedata/app /tmp/ + run: docker cp live_data_server-livedata1:/var/www/livedata/app /tmp/ - name: Combine and show code coverage shell: bash -l {0} From 92fc810b6db73cbefdf4680d4bbcd6e673228efc Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:42:38 -0400 Subject: [PATCH 13/28] fix wrong docker image in coverage steps (again) --- .github/workflows/unittest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 102a568..ec8a6ba 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -61,10 +61,10 @@ jobs: - name: Stop the coverage process # Stopping the coverage process allows the code coverage to be written to disk - run: docker exec live_data_server-livedata1 /bin/bash -c "pkill coverage" + run: docker exec live_data_server-livedata-1 /bin/bash -c "pkill coverage" - name: Copy code coverage out of docker container - run: docker cp 
live_data_server-livedata1:/var/www/livedata/app /tmp/ + run: docker cp live_data_server-livedata-1:/var/www/livedata/app /tmp/ - name: Combine and show code coverage shell: bash -l {0} From ce533388c483ec710188f88ab55629b344621c3c Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 6 Aug 2024 13:48:54 -0400 Subject: [PATCH 14/28] docker-compose -> docker compose everywhere --- .github/workflows/unittest.yml | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index ec8a6ba..beed291 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -44,14 +44,7 @@ jobs: - name: Start docker containers run: | cp ./config/docker-compose.envlocal.yml docker-compose.yml - if [[ $(docker compose version 2>/dev/null) != "" ]]; then - DOCKER_COMPOSE="docker compose" - elif [[ $(docker-compose version 2>/dev/null) != "" ]]; then - DOCKER_COMPOSE="docker-compose" - else - echo "docker compose or docker-compose is not installed" && exit 1 - fi - $DOCKER_COMPOSE up --build -d + docker compose up --build -d - name: Sleep, wait for containers to start up run: sleep 30 @@ -77,7 +70,7 @@ jobs: - name: Bring down docker containers completely now # This will completely remove the containers - run: docker-compose down + run: docker compose down - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v4 From 450faa53b4024816cf715caf896638ab1c20700b Mon Sep 17 00:00:00 2001 From: glass-ships Date: Wed, 7 Aug 2024 13:43:38 -0400 Subject: [PATCH 15/28] remove try except, unsure what to catch apart from bare exception --- src/live_data_server/plots/admin.py | 9 ++++++- src/live_data_server/plots/urls.py | 2 ++ src/live_data_server/plots/views.py | 41 ++++++++++++++++++++++++++++- tests/test_expiration.py | 12 +++++++++ 4 files changed, 62 insertions(+), 2 deletions(-) diff --git a/src/live_data_server/plots/admin.py b/src/live_data_server/plots/admin.py index 07accd3..be4eebf 100644 --- a/src/live_data_server/plots/admin.py +++ b/src/live_data_server/plots/admin.py @@ -8,7 +8,14 @@ class PlotDataAdmin(admin.ModelAdmin): class DataRunAdmin(admin.ModelAdmin): - list_display = ("id", "run_number", "run_id", "instrument", "created_on") + list_display = ( + "id", + "run_number", + "run_id", + "instrument", + "created_on", + "expiration_date", + ) admin.site.register(DataRun, DataRunAdmin) diff --git a/src/live_data_server/plots/urls.py b/src/live_data_server/plots/urls.py index a0cba07..76df22d 100644 --- a/src/live_data_server/plots/urls.py +++ b/src/live_data_server/plots/urls.py @@ -16,4 +16,6 @@ ), re_path(r"^(?P[\w]+)/upload_user_data/$", views.upload_user_data, name="upload_user_data"), re_path(r"^(?P[\w]+)/list/$", views.get_data_list, name="get_data_list"), + re_path("get_all_runs", views.get_all_runs, name="get_all_runs"), + re_path("purge_expired", views.purge_expired, name="purge_expired"), ] diff --git a/src/live_data_server/plots/views.py b/src/live_data_server/plots/views.py index 2919839..ef2a8ae 100644 --- a/src/live_data_server/plots/views.py +++ b/src/live_data_server/plots/views.py @@ -99,7 +99,6 @@ def _store(request, instrument, run_id=None, as_user=False): @param run_id: run number @param as_user: if True, we will store as user data """ - # r = if "file" in request.FILES: raw_data = request.FILES["file"].read().decode("utf-8") data_type_default = PlotData.get_data_type_from_data(raw_data) @@ -162,3 +161,43 @@ def get_data_list(_, instrument): ) ) return 
JsonResponse(data_list, safe=False) + + +@csrf_exempt +@check_credentials +def get_all_runs(_): + """ + Get a list of all runs for all instruments/users + """ + data_list = [] + for item in DataRun.objects.all(): + timestamp_local = timezone.localtime(item.created_on) + timestamp_formatted = dateformat.DateFormat(timestamp_local).format(settings.DATETIME_FORMAT) + expiration_local = timezone.localtime(item.expiration_date) + expiration_formatted = dateformat.DateFormat(expiration_local).format(settings.DATETIME_FORMAT) + data_list.append( + dict( + id=item.id, + run_number=str(item.run_number), + run_id=item.run_id, + instrument=item.instrument.name, + timestamp=item.created_on.isoformat(), + created_on=timestamp_formatted, + expiration_date=expiration_formatted, + expired=True if expiration_local < timezone.now() else False, + ) + ) + return JsonResponse(data_list, safe=False) + + +@csrf_exempt +@check_credentials +def purge_expired(_): + """ + Delete all expired runs and related plots + """ + runs = DataRun.objects.all() + for run in runs: + if run.expiration_date < timezone.now(): + run.delete() + return HttpResponse() diff --git a/tests/test_expiration.py b/tests/test_expiration.py index 8167529..64932a6 100644 --- a/tests/test_expiration.py +++ b/tests/test_expiration.py @@ -109,6 +109,18 @@ def test_expiration_user(self, data_server): ) assert request.status_code == HTTP_OK + # check that expiration field for runs are marked correctly r = request.json() assert r[0]["expired"] is False assert r[1]["expired"] is True + + def test_deleting_expired(self): + """Test the purge_expired view""" + r = requests.post(f"{TEST_URL}/plots/purge_expired/", data=self.user_data) + assert r.status_code == HTTP_OK + + # check if all expired runs are deleted + r = requests.post(f"{TEST_URL}/plots/get_all_runs/", data=self.user_data) + runs = r.json() + assert len(runs) == 2 + assert all(run["expired"] is False for run in runs) From 6c942db65cee4fce93e81cf68cb30e02c529601f Mon Sep 17 00:00:00 2001 From: glass-ships Date: Wed, 7 Aug 2024 14:14:58 -0400 Subject: [PATCH 16/28] rename expired run data id just because --- tests/test_expiration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_expiration.py b/tests/test_expiration.py index 64932a6..d37a41b 100644 --- a/tests/test_expiration.py +++ b/tests/test_expiration.py @@ -96,7 +96,7 @@ def test_expiration_user(self, data_server): # create expired run expiration_date = datetime.now(tz=timezone.utc) - timedelta(days=365 * 3) - request_data["data_id"] = "reflectivity2.json" + request_data["data_id"] = "reflectivity_expired.json" request_data["expiration_date"] = expiration_date request = requests.post( f"{TEST_URL}/plots/{self.username}/upload_user_data/", data=request_data, files=files, verify=True From 5739c544558c7a0704e52197e31a3a9383789e6c Mon Sep 17 00:00:00 2001 From: glass-ships Date: Thu, 8 Aug 2024 10:28:49 -0400 Subject: [PATCH 17/28] trying management command, need to figure out tests --- Makefile | 7 + src/live_data_server/config.py | 4 - .../live_data_server/settings.py | 3 +- .../management/commands/purge_expired_data.py | 13 ++ test.sh | 10 -- tests/conftest.py | 8 +- tests/test_expiration2.py | 145 ++++++++++++++++++ 7 files changed, 171 insertions(+), 19 deletions(-) delete mode 100644 src/live_data_server/config.py create mode 100644 src/live_data_server/plots/management/commands/purge_expired_data.py delete mode 100644 test.sh create mode 100644 tests/test_expiration2.py diff --git a/Makefile b/Makefile 
index 831c368..0079a39 100644 --- a/Makefile +++ b/Makefile @@ -36,6 +36,13 @@ docker/compose/local: docker/compose/validate ## compose and start the service l \cp ./config/docker-compose.envlocal.yml docker-compose.yml $(DOCKER_COMPOSE) up --build +.PHONY: clean +clean: ## remove all local compiled Python files + rm -f `find . -type f -name '*.py[co]' ` \ + `find . -type f -name '_version.py'` + rm -rf `find . -name __pycache__` \ + .ruff_cache .pytest_cache *.egg-info + .PHONY: check .PHONY: first_install .PHONY: help diff --git a/src/live_data_server/config.py b/src/live_data_server/config.py deleted file mode 100644 index 76e1ec5..0000000 --- a/src/live_data_server/config.py +++ /dev/null @@ -1,4 +0,0 @@ -from datetime import timedelta - -# Default expiration time for runs/data (3 years) -DEFAULT_EXPIRATION_TIME = timedelta(days=365 * 3) diff --git a/src/live_data_server/live_data_server/settings.py b/src/live_data_server/live_data_server/settings.py index c125fb6..eb331e9 100644 --- a/src/live_data_server/live_data_server/settings.py +++ b/src/live_data_server/live_data_server/settings.py @@ -40,7 +40,8 @@ # Application definition INSTALLED_APPS = [ - "plots.apps.PlotsConfig", + # "plots.apps.PlotsConfig", + "plots", "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", diff --git a/src/live_data_server/plots/management/commands/purge_expired_data.py b/src/live_data_server/plots/management/commands/purge_expired_data.py new file mode 100644 index 0000000..689d2e0 --- /dev/null +++ b/src/live_data_server/plots/management/commands/purge_expired_data.py @@ -0,0 +1,13 @@ +from django.core.management.base import BaseCommand +from django.utils import timezone + +from ...models import DataRun + + +class Command(BaseCommand): + help = "Delete expired runs and related plots" + + def handle(self, *args, **options): # noqa: ARG002 + for item in DataRun.objects.all(): + if item.expiration_date < timezone.now(): + item.delete() diff --git a/test.sh b/test.sh deleted file mode 100644 index 85606a9..0000000 --- a/test.sh +++ /dev/null @@ -1,10 +0,0 @@ -if [[ $(docker compose version 2>/dev/null) != "" ]]; then - DOCKER_COMPOSE="docker compose" -elif [[ $(docker-compose version 2>/dev/null) != "" ]]; then - DOCKER_COMPOSE="docker-compose" -else - echo "docker compose or docker-compose is not installed" - exit 1 -fi - -$DOCKER_COMPOSE version diff --git a/tests/conftest.py b/tests/conftest.py index 09004a9..c684fd7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,7 @@ @pytest.fixture(scope="module") def data_server(): - r"""Object containing info and functionality for data files. + """Object containing info and functionality for data files. It assumes the data files are stored under directory `data/`, located under the same directory as this module. 
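Patch 17's commit message notes that testing the new management command is still unsettled. For reference, Django can run a management command in-process, provided the test session has Django settings configured (for example through pytest-django). The command is addressed by name, without a `.py` suffix. A sketch of that option, not what this suite ultimately does (these tests target a live container, and patch 18 shells in with `docker exec` instead):

```python
from django.core.management import call_command

# Executes plots/management/commands/purge_expired_data.py in-process.
call_command("purge_expired_data")
```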
@@ -19,14 +19,14 @@ class _DataServe(object): @property def directory(self): - r"""Directory where to find the data files""" + """Directory where to find the data files""" return self._directory def path_to(self, basename): - r"""Absolute path to a data file""" + """Absolute path to a data file""" file_path = os.path.join(self._directory, basename) if not os.path.isfile(file_path): - raise IOError("File {basename} not found in data directory {self._directory}") + raise IOError(f"File {basename} not found in data directory {self._directory}") return file_path return _DataServe() diff --git a/tests/test_expiration2.py b/tests/test_expiration2.py new file mode 100644 index 0000000..d15fa71 --- /dev/null +++ b/tests/test_expiration2.py @@ -0,0 +1,145 @@ +import json +import os +from datetime import datetime, timedelta, timezone + +import psycopg +import pytest +import requests + +# from django.core.management import call_command +from django.test import Client + +TEST_URL = "http://127.0.0.1" +HTTP_OK = requests.status_codes.codes["OK"] +HTTP_UNAUTHORIZED = requests.status_codes.codes["unauthorized"] +HTTP_NOT_FOUND = requests.status_codes.codes["NOT_FOUND"] +HTTP_BAD_REQUEST = requests.status_codes.codes["BAD_REQUEST"] + +# @pytest.fixture +# def client(): +# return Client() +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "live_data_server.live_data_server.settings") +c = Client() + + +class TestLiveDataServer: + # authenticate with username and password + username = os.environ.get("DJANGO_SUPERUSER_USERNAME") + password = os.environ.get("DJANGO_SUPERUSER_PASSWORD") + user_data = {"username": username, "password": password} + + @classmethod + def setup_class(cls): + """Clean the database before running tests""" + conn = psycopg.connect( + dbname=os.environ.get("DATABASE_NAME"), + user=os.environ.get("DATABASE_USER"), + password=os.environ.get("DATABASE_PASS"), + port=os.environ.get("DATABASE_PORT"), + host="localhost", + ) + cur = conn.cursor() + cur.execute("DELETE FROM plots_plotdata") + cur.execute("DELETE FROM plots_datarun") + cur.execute("DELETE FROM plots_instrument") + conn.commit() + conn.close() + + def test_expiration_plot(self, data_server): # , client): + """Test the expiration field on DataRun model for upload_plot_data""" + # client = Client() + instrument = "TEST_INST" + + # request data + filename = "reflectivity.html" + files = {"file": open(data_server.path_to(filename)).read()} + request_data = { + **self.user_data, + "data_id": filename, + } + + # create a new run + run_id = 12345 + request = c.post( + f"{TEST_URL}/plots/{instrument}/{run_id}/upload_plot_data/", data=request_data, files=files, verify=True + ) + # request = requests.post( + # f"{TEST_URL}/plots/{instrument}/{run_id}/upload_plot_data/", data=request_data, files=files, verify=True + # ) + assert request.status_code == HTTP_OK + return + # create expired run + run_id += 1 + expiration_date = datetime.now(tz=timezone.utc) - timedelta(days=365 * 3) + request_data["expiration_date"] = expiration_date + request = requests.post( + f"{TEST_URL}/plots/{instrument}/{run_id}/upload_plot_data/", + data=request_data, + files=files, + verify=True, + ) + assert request.status_code == HTTP_OK + + request = requests.post( + f"{TEST_URL}/plots/{instrument}/list/", + data=self.user_data, + ) + assert request.status_code == HTTP_OK + + r = request.json() + assert r[0]["expired"] is False + assert r[1]["expired"] is True + + @pytest.mark.skip("Not implemented") + def test_expiration_user(self, data_server): + """Test the 
expiration field on DataRun model for upload_user_data""" + + filename = "reflectivity.json" + with open(data_server.path_to(filename), "r") as file_handle: + files = {"file": json.dumps(json.load(file_handle))} + request_data = { + **self.user_data, + "data_id": filename, + } + + # create a new run + request = requests.post( + f"{TEST_URL}/plots/{self.username}/upload_user_data/", data=request_data, files=files, verify=True + ) + assert request.status_code == HTTP_OK + + # create expired run + expiration_date = datetime.now(tz=timezone.utc) - timedelta(days=365 * 3) + request_data["data_id"] = "reflectivity_expired.json" + request_data["expiration_date"] = expiration_date + request = requests.post( + f"{TEST_URL}/plots/{self.username}/upload_user_data/", data=request_data, files=files, verify=True + ) + assert request.status_code == HTTP_OK + + request = requests.post( + f"{TEST_URL}/plots/{self.username}/list/", + data=self.user_data, + ) + assert request.status_code == HTTP_OK + + # check that expiration field for runs are marked correctly + r = request.json() + assert r[0]["expired"] is False + assert r[1]["expired"] is True + + @pytest.mark.skip("Not implemented") + def test_deleting_expired(self): + """Test the purge_expired view""" + # r = requests.post(f"{TEST_URL}/plots/purge_expired/", data=self.user_data) + # assert r.status_code == HTTP_OK + # call_command("purge_expired_data.py") + import subprocess + + subprocess.call(["python", "src/live_data_server/manage.py", "purge_expired_data"]) + + # check if all expired runs are deleted + r = requests.post(f"{TEST_URL}/plots/get_all_runs/", data=self.user_data) + runs = r.json() + assert len(runs) == 2 + assert all(run["expired"] is False for run in runs) From 3654b14f6b97ee1f5d0a256e2df52fcc40b8647e Mon Sep 17 00:00:00 2001 From: glass-ships Date: Thu, 8 Aug 2024 14:53:39 -0400 Subject: [PATCH 18/28] fix tests --- environment.yml | 3 +- .../live_data_server/settings.py | 4 +- .../management/commands/purge_expired_data.py | 7 +- src/live_data_server/plots/urls.py | 1 - src/live_data_server/plots/view_util.py | 10 ++ src/live_data_server/plots/views.py | 13 -- tests/test_expiration.py | 9 +- tests/test_expiration2.py | 145 ------------------ 8 files changed, 22 insertions(+), 170 deletions(-) delete mode 100644 tests/test_expiration2.py diff --git a/environment.yml b/environment.yml index 0a0f7ea..dc3301a 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,8 @@ dependencies: - postgresql=16 - sphinx - sphinx_rtd_theme - - django=4.2 + # - django=4.2 + - django=5.1 - django-cors-headers=4.4 - psycopg=3.2 - gunicorn diff --git a/src/live_data_server/live_data_server/settings.py b/src/live_data_server/live_data_server/settings.py index eb331e9..1fe066c 100644 --- a/src/live_data_server/live_data_server/settings.py +++ b/src/live_data_server/live_data_server/settings.py @@ -137,9 +137,9 @@ USE_I18N = True -USE_L10N = True +USE_L10N = True # already set to True by default in 5.0 -USE_TZ = True +USE_TZ = True # already set to True by default in 5.0 # Static files (CSS, JavaScript, Images) diff --git a/src/live_data_server/plots/management/commands/purge_expired_data.py b/src/live_data_server/plots/management/commands/purge_expired_data.py index 689d2e0..e56ff4d 100644 --- a/src/live_data_server/plots/management/commands/purge_expired_data.py +++ b/src/live_data_server/plots/management/commands/purge_expired_data.py @@ -1,13 +1,10 @@ from django.core.management.base import BaseCommand -from django.utils import timezone -from 
...models import DataRun
+from ...view_util import purge_expired_runs
 
 
 class Command(BaseCommand):
     help = "Delete expired runs and related plots"
 
     def handle(self, *args, **options):  # noqa: ARG002
-        for item in DataRun.objects.all():
-            if item.expiration_date < timezone.now():
-                item.delete()
+        purge_expired_runs()
diff --git a/src/live_data_server/plots/urls.py b/src/live_data_server/plots/urls.py
index 76df22d..b49164c 100644
--- a/src/live_data_server/plots/urls.py
+++ b/src/live_data_server/plots/urls.py
@@ -17,5 +17,4 @@
     re_path(r"^(?P<user>[\w]+)/upload_user_data/$", views.upload_user_data, name="upload_user_data"),
     re_path(r"^(?P<instrument>[\w]+)/list/$", views.get_data_list, name="get_data_list"),
     re_path("get_all_runs", views.get_all_runs, name="get_all_runs"),
-    re_path("purge_expired", views.purge_expired, name="purge_expired"),
 ]
diff --git a/src/live_data_server/plots/view_util.py b/src/live_data_server/plots/view_util.py
index 533cd3c..861cca6 100644
--- a/src/live_data_server/plots/view_util.py
+++ b/src/live_data_server/plots/view_util.py
@@ -190,3 +190,13 @@ def store_plot_data(instrument, run_id, data, data_type, expiration_date: dateti
     plot_data.data_type = data_type
     plot_data.timestamp = timezone.now()
     plot_data.save()
+
+
+def purge_expired_runs():
+    """
+    Purge expired runs and related plots
+    """
+    runs = DataRun.objects.all()
+    for run in runs:
+        if run.expiration_date < timezone.now():
+            run.delete()
diff --git a/src/live_data_server/plots/views.py b/src/live_data_server/plots/views.py
index ef2a8ae..5f2de05 100644
--- a/src/live_data_server/plots/views.py
+++ b/src/live_data_server/plots/views.py
@@ -188,16 +188,3 @@ def get_all_runs(_):
         )
     )
     return JsonResponse(data_list, safe=False)
-
-
-@csrf_exempt
-@check_credentials
-def purge_expired(_):
-    """
-    Delete all expired runs and related plots
-    """
-    runs = DataRun.objects.all()
-    for run in runs:
-        if run.expiration_date < timezone.now():
-            run.delete()
-    return HttpResponse()
diff --git a/tests/test_expiration.py b/tests/test_expiration.py
index d37a41b..2e9a89e 100644
--- a/tests/test_expiration.py
+++ b/tests/test_expiration.py
@@ -1,5 +1,6 @@
 import json
 import os
+import subprocess
 from datetime import datetime, timedelta, timezone
 
 import psycopg
@@ -116,10 +117,12 @@ def test_expiration_user(self, data_server):
 
     def test_deleting_expired(self):
         """Test the purge_expired view"""
-        r = requests.post(f"{TEST_URL}/plots/purge_expired/", data=self.user_data)
-        assert r.status_code == HTTP_OK
+        command = "docker exec -i live_data_server-livedata-1 bash -ic"
+        subcommand = "conda activate livedata && cd app && python manage.py purge_expired_data"
+        output = subprocess.check_output([*command.split(" "), subcommand])
+        print(output)
 
-        # check if all expired runs are deleted
+        # Ensure the above ran and worked
         r = requests.post(f"{TEST_URL}/plots/get_all_runs/", data=self.user_data)
         runs = r.json()
         assert len(runs) == 2
diff --git a/tests/test_expiration2.py b/tests/test_expiration2.py
deleted file mode 100644
index d15fa71..0000000
--- a/tests/test_expiration2.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import json
-import os
-from datetime import datetime, timedelta, timezone
-
-import psycopg
-import pytest
-import requests
-
-# from django.core.management import call_command
-from django.test import Client
-
-TEST_URL = "http://127.0.0.1"
-HTTP_OK = requests.status_codes.codes["OK"]
-HTTP_UNAUTHORIZED = requests.status_codes.codes["unauthorized"]
-HTTP_NOT_FOUND = requests.status_codes.codes["NOT_FOUND"]
-HTTP_BAD_REQUEST = 
requests.status_codes.codes["BAD_REQUEST"] - -# @pytest.fixture -# def client(): -# return Client() -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "live_data_server.live_data_server.settings") -c = Client() - - -class TestLiveDataServer: - # authenticate with username and password - username = os.environ.get("DJANGO_SUPERUSER_USERNAME") - password = os.environ.get("DJANGO_SUPERUSER_PASSWORD") - user_data = {"username": username, "password": password} - - @classmethod - def setup_class(cls): - """Clean the database before running tests""" - conn = psycopg.connect( - dbname=os.environ.get("DATABASE_NAME"), - user=os.environ.get("DATABASE_USER"), - password=os.environ.get("DATABASE_PASS"), - port=os.environ.get("DATABASE_PORT"), - host="localhost", - ) - cur = conn.cursor() - cur.execute("DELETE FROM plots_plotdata") - cur.execute("DELETE FROM plots_datarun") - cur.execute("DELETE FROM plots_instrument") - conn.commit() - conn.close() - - def test_expiration_plot(self, data_server): # , client): - """Test the expiration field on DataRun model for upload_plot_data""" - # client = Client() - instrument = "TEST_INST" - - # request data - filename = "reflectivity.html" - files = {"file": open(data_server.path_to(filename)).read()} - request_data = { - **self.user_data, - "data_id": filename, - } - - # create a new run - run_id = 12345 - request = c.post( - f"{TEST_URL}/plots/{instrument}/{run_id}/upload_plot_data/", data=request_data, files=files, verify=True - ) - # request = requests.post( - # f"{TEST_URL}/plots/{instrument}/{run_id}/upload_plot_data/", data=request_data, files=files, verify=True - # ) - assert request.status_code == HTTP_OK - return - # create expired run - run_id += 1 - expiration_date = datetime.now(tz=timezone.utc) - timedelta(days=365 * 3) - request_data["expiration_date"] = expiration_date - request = requests.post( - f"{TEST_URL}/plots/{instrument}/{run_id}/upload_plot_data/", - data=request_data, - files=files, - verify=True, - ) - assert request.status_code == HTTP_OK - - request = requests.post( - f"{TEST_URL}/plots/{instrument}/list/", - data=self.user_data, - ) - assert request.status_code == HTTP_OK - - r = request.json() - assert r[0]["expired"] is False - assert r[1]["expired"] is True - - @pytest.mark.skip("Not implemented") - def test_expiration_user(self, data_server): - """Test the expiration field on DataRun model for upload_user_data""" - - filename = "reflectivity.json" - with open(data_server.path_to(filename), "r") as file_handle: - files = {"file": json.dumps(json.load(file_handle))} - request_data = { - **self.user_data, - "data_id": filename, - } - - # create a new run - request = requests.post( - f"{TEST_URL}/plots/{self.username}/upload_user_data/", data=request_data, files=files, verify=True - ) - assert request.status_code == HTTP_OK - - # create expired run - expiration_date = datetime.now(tz=timezone.utc) - timedelta(days=365 * 3) - request_data["data_id"] = "reflectivity_expired.json" - request_data["expiration_date"] = expiration_date - request = requests.post( - f"{TEST_URL}/plots/{self.username}/upload_user_data/", data=request_data, files=files, verify=True - ) - assert request.status_code == HTTP_OK - - request = requests.post( - f"{TEST_URL}/plots/{self.username}/list/", - data=self.user_data, - ) - assert request.status_code == HTTP_OK - - # check that expiration field for runs are marked correctly - r = request.json() - assert r[0]["expired"] is False - assert r[1]["expired"] is True - - @pytest.mark.skip("Not implemented") - def 
test_deleting_expired(self): - """Test the purge_expired view""" - # r = requests.post(f"{TEST_URL}/plots/purge_expired/", data=self.user_data) - # assert r.status_code == HTTP_OK - # call_command("purge_expired_data.py") - import subprocess - - subprocess.call(["python", "src/live_data_server/manage.py", "purge_expired_data"]) - - # check if all expired runs are deleted - r = requests.post(f"{TEST_URL}/plots/get_all_runs/", data=self.user_data) - runs = r.json() - assert len(runs) == 2 - assert all(run["expired"] is False for run in runs) From ec17de264d4dbd04ac3d933de798c68e847b99d6 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Thu, 8 Aug 2024 14:55:55 -0400 Subject: [PATCH 19/28] add new migration for model changes --- .../0002_datarun_expiration_date.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 src/live_data_server/plots/migrations/0002_datarun_expiration_date.py diff --git a/src/live_data_server/plots/migrations/0002_datarun_expiration_date.py b/src/live_data_server/plots/migrations/0002_datarun_expiration_date.py new file mode 100644 index 0000000..8f1470a --- /dev/null +++ b/src/live_data_server/plots/migrations/0002_datarun_expiration_date.py @@ -0,0 +1,22 @@ +# Generated by Django 5.1 on 2024-08-08 18:55 + +import datetime + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("plots", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="datarun", + name="expiration_date", + field=models.DateTimeField( + default=datetime.datetime(2027, 8, 8, 18, 55, 41, 999298, tzinfo=datetime.timezone.utc), + verbose_name="Expires", + ), + ), + ] From 100df6661c8ef2c2a92e58a3fefbe8eeba7fea97 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Thu, 8 Aug 2024 15:14:06 -0400 Subject: [PATCH 20/28] remove unnecessary util command --- .github/workflows/unittest.yml | 6 ++---- Makefile | 4 ++-- .../plots/management/commands/purge_expired_data.py | 8 ++++++-- src/live_data_server/plots/view_util.py | 10 ---------- tests/test_expiration.py | 2 +- 5 files changed, 11 insertions(+), 19 deletions(-) diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index beed291..af84efb 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -36,10 +36,8 @@ jobs: channels: conda-forge,defaults mamba-version: "*" environment-file: environment.yml - # These don't seem to be valid options for this action - # See: https://github.com/conda-incubator/setup-miniconda - # cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} - # cache-downloads-key: ${{ runner.os }}-downloads-${{ hashFiles('**/environment.yml') }} + cache-environment-key: ${{ runner.os }}-env-${{ hashFiles('**/environment.yml') }} + cache-downloads-key: ${{ runner.os }}-downloads-${{ hashFiles('**/environment.yml') }} - name: Start docker containers run: | diff --git a/Makefile b/Makefile index 0079a39..26d3eb7 100644 --- a/Makefile +++ b/Makefile @@ -40,8 +40,8 @@ docker/compose/local: docker/compose/validate ## compose and start the service l clean: ## remove all local compiled Python files rm -f `find . -type f -name '*.py[co]' ` \ `find . -type f -name '_version.py'` - rm -rf `find . -name __pycache__` \ - .ruff_cache .pytest_cache *.egg-info + rm -rf `find . 
-name __pycache__ -o -name "*.egg-info"` \ + .ruff_cache .pytest_cache .PHONY: check .PHONY: first_install diff --git a/src/live_data_server/plots/management/commands/purge_expired_data.py b/src/live_data_server/plots/management/commands/purge_expired_data.py index e56ff4d..6318927 100644 --- a/src/live_data_server/plots/management/commands/purge_expired_data.py +++ b/src/live_data_server/plots/management/commands/purge_expired_data.py @@ -1,10 +1,14 @@ from django.core.management.base import BaseCommand +from django.utils import timezone -from ...view_util import purge_expired_runs +from ...models import DataRun class Command(BaseCommand): help = "Delete expired runs and related plots" def handle(self, *args, **options): # noqa: ARG002 - purge_expired_runs() + runs = DataRun.objects.all() + for run in runs: + if run.expiration_date < timezone.now(): + run.delete() diff --git a/src/live_data_server/plots/view_util.py b/src/live_data_server/plots/view_util.py index 861cca6..533cd3c 100644 --- a/src/live_data_server/plots/view_util.py +++ b/src/live_data_server/plots/view_util.py @@ -190,13 +190,3 @@ def store_plot_data(instrument, run_id, data, data_type, expiration_date: dateti plot_data.data_type = data_type plot_data.timestamp = timezone.now() plot_data.save() - - -def purge_expired_runs(): - """ - Purge expired runs and related plots - """ - runs = DataRun.objects.all() - for run in runs: - if run.expiration_date < timezone.now(): - run.delete() diff --git a/tests/test_expiration.py b/tests/test_expiration.py index 2e9a89e..a9cc3a2 100644 --- a/tests/test_expiration.py +++ b/tests/test_expiration.py @@ -116,7 +116,7 @@ def test_expiration_user(self, data_server): assert r[1]["expired"] is True def test_deleting_expired(self): - """Test the purge_expired view""" + """Test the purge_expired_data command""" command = "docker exec -i live_data_server-livedata-1 bash -ic" subcommand = "conda activate livedata && cd app && python manage.py purge_expired_data" output = subprocess.check_output([*command.split(" "), subcommand]) From fc49df642e6751652392030417a52a934d6678f0 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Thu, 8 Aug 2024 15:16:19 -0400 Subject: [PATCH 21/28] eh remove unneeded USE_L10N and USE_TZ --- src/live_data_server/live_data_server/settings.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/live_data_server/live_data_server/settings.py b/src/live_data_server/live_data_server/settings.py index 1fe066c..bcd05d3 100644 --- a/src/live_data_server/live_data_server/settings.py +++ b/src/live_data_server/live_data_server/settings.py @@ -137,10 +137,6 @@ USE_I18N = True -USE_L10N = True # already set to True by default in 5.0 - -USE_TZ = True # already set to True by default in 5.0 - # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ From f296c39491f121afaa299560401c8921cd1269c7 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Fri, 9 Aug 2024 10:05:33 -0400 Subject: [PATCH 22/28] update documentation --- README.md | 79 +++++++++++++------------ docs/developer/config_for_local_use.rst | 11 ++++ docs/developer/index.rst | 1 + docs/developer/updating_data_models.rst | 38 ++++++++++++ 4 files changed, 90 insertions(+), 39 deletions(-) create mode 100644 docs/developer/updating_data_models.rst diff --git a/README.md b/README.md index c5eb77a..32d1b85 100644 --- a/README.md +++ b/README.md @@ -7,61 +7,62 @@ Data server for data plots. 
-Developer documentation at https://livedata-ornl.readthedocs.io/en/latest/
-
 ## Development
 
+Developer documentation at <https://livedata-ornl.readthedocs.io/en/latest/>
+
 ### Dependencies
 
-- [Conda](https://conda.io/projects/conda/en/latest/user-guide/install/index.html) / [Mamba]()
+- [Conda](https://conda.io/projects/conda/en/latest/user-guide/install/index.html) / [Mamba/Miniforge](https://github.com/conda-forge/miniforge)
 - [Docker](https://docs.docker.com/engine/install/) and [Docker Compose](https://docs.docker.com/compose/install/)
 - [direnv](https://direnv.net/) (optional)
 
 ### Setup for Local Development
 
-Clone the repository and `cd` into the project directory.
+1. Clone the repository and `cd` into the project directory.
 
-Create a conda environment `livedata`, containing all the dependencies
+1. Create a conda environment `livedata`, containing all the dependencies
 
-```python
-conda env create -f environment.yml
-conda activate livedata
-```
+   ```bash
+   conda env create -f environment.yml
+   conda activate livedata
+   ```
 
-To deploy this application locally, you will need to set a number of environment variables,
-for example (bash):
+1. To deploy this application locally, you will need to set a number of environment variables,
+   for example (bash):
 
-```bash
-export DATABASE_NAME=livedatadb
-export DATABASE_USER=livedatauser
-export DATABASE_PASS=livedatapass
-export DATABASE_HOST=db
-export DATABASE_PORT=5432
-export LIVE_PLOT_SECRET_KEY="secretKey"
-
-# These need to be set for `pytest`,
-# but are not used in the docker compose
-export DJANGO_SUPERUSER_USERNAME=$DATABASE_USER
-export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS
-```
+   ```bash
+   export DATABASE_NAME=livedatadb
+   export DATABASE_USER=livedatauser
+   export DATABASE_PASS=livedatapass
+   export DATABASE_HOST=db
+   export DATABASE_PORT=5432
+   export LIVE_PLOT_SECRET_KEY="secretKey"
+
+   # These need to be set for `pytest`,
+   # but are not used in the docker compose
+   export DJANGO_SUPERUSER_USERNAME=$DATABASE_USER
+   export DJANGO_SUPERUSER_PASSWORD=$DATABASE_PASS
+   ```
 
-**Notes**:
+   **Notes**:
 
-- The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default.
-  If you need to change the port, you will need to modify the `docker-compose.yml` file accordingly.
+   - The `DATABASE_PORT` _must_ be set to `5432`, as Postgres is configured to listen on that port by default.
+     If you need to change the port, you will need to modify the `docker-compose.yml` file accordingly.
 
-- It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/).
-  direnv will automatically load the variables when you `cd` into the project directory.
+   - It is recommended to save these variables into an `.envrc` file which can be managed by [direnv](https://direnv.net/).
+     direnv will automatically load the variables when you `cd` into the project directory.
 
-After the secrets are set, type in the terminal shell:
+1. After the secrets are set, type in the terminal shell:
 
-```bash
-make docker/compose/local
-```
+   ```bash
+   make docker/compose/local
+   ```
 
-This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing all the services.
+   This command will copy `config/docker-compose.envlocal.yml` into `docker-compose.yml` before composing all the services.
 
-Type `make help` to learn about other macros available as make targets.
-For instance, `make docker/pruneall` will stop all containers, then remove all containers, images, networks, and volumes.
+   Type `make help` to learn about other macros available as make targets.
+   For instance, `make docker/pruneall` will stop all containers, then remove all containers, images, networks, and volumes.
 
 ### Testing
 
@@ -84,9 +85,9 @@ DJANGO_SUPERUSER_USERNAME=***** DJANGO_SUPERUSER_PASSWORD=***** pytest
 ## Deployment to the Test Environment
 
 - Repository managing the provision for deployment:
-  - hardware and networking for deployment: https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/terraform/servers.tf#L85-97
-  - configuration independent of source code changes: https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/ansible/testfixture02-test.yaml
-- Repository managing deployment of the source to the provisioned hardware: https://code.ornl.gov/sns-hfir-scse/deployments/livedata-deploy
+  - hardware and networking for deployment: <https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/terraform/servers.tf#L85-97>
+  - configuration independent of source code changes: <https://code.ornl.gov/sns-hfir-scse/infrastructure/neutrons-test-environment/-/blob/main/ansible/testfixture02-test.yaml>
+- Repository managing deployment of the source to the provisioned hardware: <https://code.ornl.gov/sns-hfir-scse/deployments/livedata-deploy>
 
 ## Building the Documentation
 
diff --git a/docs/developer/config_for_local_use.rst b/docs/developer/config_for_local_use.rst
index 0fe12a6..e7c7793 100644
--- a/docs/developer/config_for_local_use.rst
+++ b/docs/developer/config_for_local_use.rst
@@ -2,6 +2,17 @@
 Configure for Local Debugging and Development
 =============================================
 
+This document describes how to set up the project for local debugging and development.
+
+Prerequisites
+-------------
+- `Docker <https://docs.docker.com/engine/install/>`_
+- `Docker Compose <https://docs.docker.com/compose/install/>`_
+- `Conda <https://conda.io/projects/conda/en/latest/user-guide/install/index.html>`_
+
+Setup
+-----
+
 Clone the repository and ``cd`` into the project directory.
 
 Create a conda environment ``livedata``, containing all the dependencies
diff --git a/docs/developer/index.rst b/docs/developer/index.rst
index 7c9f2cb..514626b 100644
--- a/docs/developer/index.rst
+++ b/docs/developer/index.rst
@@ -5,5 +5,6 @@ Development Guide
    :maxdepth: 2
 
    config_for_local_use
+   updating_data_models
    service_through_apache
    troubleshoot/index
diff --git a/docs/developer/updating_data_models.rst b/docs/developer/updating_data_models.rst
new file mode 100644
index 0000000..045dedc
--- /dev/null
+++ b/docs/developer/updating_data_models.rst
@@ -0,0 +1,38 @@
+=============================================
+Updating Data Models
+=============================================
+
+| There may be times when you need to update the data models used by Django.
+| This can be done by following these steps:
+
+#. Make the necessary changes to the models in ``src/live_data_server/plots/models.py``.
+#. Generate the Django migration file(s):
+
+   .. code-block:: bash
+
+      cd src/live_data_server
+      python manage.py makemigrations
+
+The migration(s) will be created in the ``src/live_data_server/plots/migrations/`` directory.
+First check the migration(s) to ensure they are correct. If they are, apply them
+from within the live_data_server Docker container:
+
+.. code-block:: bash
+
+   python manage.py migrate
+
+   # or if you are not in the container
+   docker exec -i live_data_server-livedata-1 bash -ic '
+   conda activate livedata
+   cd app
+   python manage.py migrate
+   '
+
+If the migration(s) are not correct, you can delete them and start again:
+
+.. 
code-block:: bash + + python manage.py migrate plots zero + python manage.py makemigrations + python manage.py migrate From 92dcc28b630a722be40e2334daf9c86a521287a7 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Fri, 9 Aug 2024 11:08:00 -0400 Subject: [PATCH 23/28] add codecov.yml --- codecov.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 codecov.yml diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..5e1ebb4 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,6 @@ +coverage: + status: + project: + default: + target: 80% # the required coverage value + threshold: 3% # the leniency in hitting the target From 83a6b325bad5c03c20358e720dd9d1d72de9e274 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Fri, 9 Aug 2024 11:17:48 -0400 Subject: [PATCH 24/28] modify codecov.yml --- codecov.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/codecov.yml b/codecov.yml index 5e1ebb4..3b7deba 100644 --- a/codecov.yml +++ b/codecov.yml @@ -2,5 +2,9 @@ coverage: status: project: default: - target: 80% # the required coverage value - threshold: 3% # the leniency in hitting the target + target: 80% + threshold: 3% + patch: + default: + target: 80% + threshold: 10% From 8412fa10b39eabe7b8eecc763dd0787bce1a8cd8 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Fri, 9 Aug 2024 14:26:37 -0400 Subject: [PATCH 25/28] revert add dependabot and update precommit --- .github/dependabot.yml | 10 ---------- .pre-commit-config.yaml | 2 +- 2 files changed, 1 insertion(+), 11 deletions(-) delete mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index dfd0e30..0000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,10 +0,0 @@ -# Set update schedule for GitHub Actions - -version: 2 -updates: - - - package-ecosystem: "github-actions" - directory: "/" - schedule: - # Check for updates to GitHub Actions every week - interval: "weekly" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6fb156b..96c2b83 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.6 + rev: v0.5.1 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 174eca2bc9cdb3ca10888c3ef51cc33f41c2c8df Mon Sep 17 00:00:00 2001 From: glass-ships Date: Mon, 12 Aug 2024 15:13:45 -0400 Subject: [PATCH 26/28] Try running purge with coverage --- tests/test_expiration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_expiration.py b/tests/test_expiration.py index a9cc3a2..dfe01cd 100644 --- a/tests/test_expiration.py +++ b/tests/test_expiration.py @@ -118,7 +118,8 @@ def test_expiration_user(self, data_server): def test_deleting_expired(self): """Test the purge_expired_data command""" command = "docker exec -i live_data_server-livedata-1 bash -ic" - subcommand = "conda activate livedata && cd app && python manage.py purge_expired_data" + subcommand = "conda activate livedata && cd app && coverage run manage.py purge_expired_data" + # subcommand = "conda activate livedata && cd app && python manage.py purge_expired_data" output = subprocess.check_output([*command.split(" "), subcommand]) print(output) From ede7ff96b1e5d88617250b4c9255bc232453cbe2 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 13 Aug 2024 09:27:27 -0400 Subject: [PATCH 27/28] revert to django 4.2 --- environment.yml | 3 +-- 
src/live_data_server/live_data_server/settings.py | 6 ++++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/environment.yml b/environment.yml index dc3301a..0a0f7ea 100644 --- a/environment.yml +++ b/environment.yml @@ -6,8 +6,7 @@ dependencies: - postgresql=16 - sphinx - sphinx_rtd_theme - # - django=4.2 - - django=5.1 + - django=4.2 - django-cors-headers=4.4 - psycopg=3.2 - gunicorn diff --git a/src/live_data_server/live_data_server/settings.py b/src/live_data_server/live_data_server/settings.py index bcd05d3..743c051 100644 --- a/src/live_data_server/live_data_server/settings.py +++ b/src/live_data_server/live_data_server/settings.py @@ -40,8 +40,7 @@ # Application definition INSTALLED_APPS = [ - # "plots.apps.PlotsConfig", - "plots", + "plots.apps.PlotsConfig", "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", @@ -137,6 +136,9 @@ USE_I18N = True +USE_L10N = True + +USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ From 5e6717254ca0d3b4e61560c10170c973cfe80359 Mon Sep 17 00:00:00 2001 From: glass-ships Date: Tue, 13 Aug 2024 13:17:01 -0400 Subject: [PATCH 28/28] address comments, revert django ver, etc --- docs/conf.py | 5 +- pyproject.toml | 29 ++++---- .../live_data_server/settings.py | 15 ++-- .../management/commands/purge_expired_data.py | 3 +- src/live_data_server/plots/models.py | 5 +- src/live_data_server/plots/urls.py | 2 +- src/live_data_server/plots/view_util.py | 9 ++- src/live_data_server/plots/views.py | 69 +++++-------------- tests/test_expiration.py | 29 ++++++-- 9 files changed, 83 insertions(+), 83 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 1e6f2f8..563a3c1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -2,13 +2,16 @@ # # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html + +# import os +# import sys from importlib import metadata # sys.path.insert(0, os.path.abspath("../live_data_server")) # from live_data_server import __version__ as release - release = metadata.version("live_data_server") + # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information diff --git a/pyproject.toml b/pyproject.toml index 76868ff..47b2624 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,15 +9,10 @@ dependencies = [ license = { text = "BSD-3" } [project.urls] -homepage = "https://livedata-ornl.readthedocs.io" # if no homepage, use repo url +homepage = "https://livedata-ornl.readthedocs.io" # if no homepage, use repo url [build-system] -requires = [ - "setuptools >= 40.6.0", - "wheel", - "toml", - "versioningit" -] +requires = ["setuptools >= 40.6.0", "wheel", "toml", "versioningit"] build-backend = "setuptools.build_meta" [tool.black] @@ -43,16 +38,24 @@ where = ["src"] exclude = ["tests*", "scripts*", "docs*"] [tool.pytest.ini_options] -pythonpath = [ - ".", "src", "scripts" -] +pythonpath = [".", "src", "scripts"] testpaths = ["tests"] python_files = ["test*.py"] [tool.ruff] line-length = 120 - -[tool.ruff.lint] -select = ["A", "ARG","ASYNC","BLE","C90", "E", "F", "I", "N", "UP032", "W"] +lint.select = [ + "A", + "ARG", + "ASYNC", + "BLE", + "C90", + "E", + "F", + "I", + "N", + "UP032", + "W", +] # Add additional 3rd party tool configuration here as needed diff --git a/src/live_data_server/live_data_server/settings.py b/src/live_data_server/live_data_server/settings.py 
index 743c051..b314a85 100644
--- a/src/live_data_server/live_data_server/settings.py
+++ b/src/live_data_server/live_data_server/settings.py
@@ -147,10 +147,17 @@
 STATIC_ROOT = "/var/www/livedata/static/"
 
 # Add secret key to settings only if there's a non-empty environment variable with same name
-if "LIVE_PLOT_SECRET_KEY" in os.environ:
-    secret_key = os.environ.get("LIVE_PLOT_SECRET_KEY")
-    if bool(secret_key):
-        LIVE_PLOT_SECRET_KEY = os.environ.get("LIVE_PLOT_SECRET_KEY")
+secret_key = os.environ.get("LIVE_PLOT_SECRET_KEY")
+if secret_key:
+    LIVE_PLOT_SECRET_KEY = secret_key
+
+# Set expiration time for live plots to 3 years if not set
+expiration_time = os.environ.get("LIVE_PLOT_EXPIRATION_TIME")
+if expiration_time:
+    LIVE_PLOT_EXPIRATION_TIME = int(expiration_time)
+else:
+    LIVE_PLOT_EXPIRATION_TIME = 365 * 3
+
 
 # Import local settings if available
 try:
diff --git a/src/live_data_server/plots/management/commands/purge_expired_data.py b/src/live_data_server/plots/management/commands/purge_expired_data.py
index 6318927..db17068 100644
--- a/src/live_data_server/plots/management/commands/purge_expired_data.py
+++ b/src/live_data_server/plots/management/commands/purge_expired_data.py
@@ -1,7 +1,6 @@
 from django.core.management.base import BaseCommand
 from django.utils import timezone
-
-from ...models import DataRun
+from plots.models import DataRun
 
 
 class Command(BaseCommand):
diff --git a/src/live_data_server/plots/models.py b/src/live_data_server/plots/models.py
index 0e9ed42..0f16ac4 100644
--- a/src/live_data_server/plots/models.py
+++ b/src/live_data_server/plots/models.py
@@ -6,6 +6,7 @@
 import sys
 from datetime import timedelta
 
+from django.conf import settings
 from django.db import models
 from django.utils import timezone
 
@@ -40,7 +41,9 @@
     run_id = models.TextField()
     instrument = models.ForeignKey(Instrument, on_delete=models.deletion.CASCADE)
     created_on = models.DateTimeField("Timestamp", auto_now_add=True)
-    expiration_date = models.DateTimeField("Expires", default=timezone.now() + timedelta(days=(365 * 3)))
+    expiration_date = models.DateTimeField(
+        "Expires", default=timezone.now() + timedelta(days=(settings.LIVE_PLOT_EXPIRATION_TIME))
+    )
 
     def __str__(self):
         return f"{self.instrument}_{self.run_number}_{self.run_id}"
diff --git a/src/live_data_server/plots/urls.py b/src/live_data_server/plots/urls.py
index b49164c..94ab868 100644
--- a/src/live_data_server/plots/urls.py
+++ b/src/live_data_server/plots/urls.py
@@ -16,5 +16,5 @@
     ),
     re_path(r"^(?P<user>[\w]+)/upload_user_data/$", views.upload_user_data, name="upload_user_data"),
     re_path(r"^(?P<instrument>[\w]+)/list/$", views.get_data_list, name="get_data_list"),
-    re_path("get_all_runs", views.get_all_runs, name="get_all_runs"),
+    # re_path(r"^(?P<instrument>[\w]+)/list_extra/$", views.get_data_list, name="get_data_list"),
 ]
diff --git a/src/live_data_server/plots/view_util.py b/src/live_data_server/plots/view_util.py
index 533cd3c..0428f01 100644
--- a/src/live_data_server/plots/view_util.py
+++ b/src/live_data_server/plots/view_util.py
@@ -6,13 +6,12 @@
 import logging
 import sys
 from datetime import datetime
+from typing import Optional
 
 from django.conf import settings
 from django.http import HttpResponse
 from django.utils import timezone
-
-# from plots.models import DataRun, Instrument, PlotData
-from .models import DataRun, Instrument, PlotData
+from plots.models import DataRun, Instrument, PlotData
 
 
 def generate_key(instrument, run_id):
@@ -121,7 +120,7 @@
     return 
None -def store_user_data(user, data_id, data, data_type, expiration_date: datetime): +def store_user_data(user, data_id, data, data_type, expiration_date: Optional[datetime] = None): """ Store plot data and associate it to a user identifier (a name, not an actual user since users don't log in to this system). @@ -166,7 +165,7 @@ def store_user_data(user, data_id, data, data_type, expiration_date: datetime): plot_data.save() -def store_plot_data(instrument, run_id, data, data_type, expiration_date: datetime): +def store_plot_data(instrument, run_id, data, data_type, expiration_date: Optional[datetime] = None): """ Store plot data @param instrument: instrument name diff --git a/src/live_data_server/plots/views.py b/src/live_data_server/plots/views.py index 5f2de05..ac1399b 100644 --- a/src/live_data_server/plots/views.py +++ b/src/live_data_server/plots/views.py @@ -13,16 +13,10 @@ from django.utils import dateformat, timezone from django.views.decorators.cache import cache_page from django.views.decorators.csrf import csrf_exempt +from plots.models import DataRun, Instrument, PlotData -# from live_data_server.config import DEFAULT_EXPIRATION_TIME -# from live_data_server.plots import view_util from . import view_util -# from plots.models import DataRun, Instrument, PlotData -from .models import DataRun, Instrument, PlotData - -DEFAULT_EXPIRATION_TIME = 365 * 3 # 3 years - def check_credentials(fn): """ @@ -103,9 +97,10 @@ def _store(request, instrument, run_id=None, as_user=False): raw_data = request.FILES["file"].read().decode("utf-8") data_type_default = PlotData.get_data_type_from_data(raw_data) data_type = request.POST.get("data_type", default=data_type_default) - expiration_date = request.POST.get("expiration_date", default=None) - if expiration_date is None: - expiration_date = timezone.now() + timedelta(days=DEFAULT_EXPIRATION_TIME) + expiration_date = request.POST.get( + "expiration_date", default=timezone.now() + timedelta(days=settings.LIVE_PLOT_EXPIRATION_TIME) + ) + if as_user: data_id = request.POST.get("data_id", default="") view_util.store_user_data(instrument, data_id, raw_data, data_type, expiration_date) @@ -138,53 +133,27 @@ def upload_user_data(request, user): @csrf_exempt @check_credentials -def get_data_list(_, instrument): +def get_data_list(request, instrument): """ Get a list of user data """ instrument_object = get_object_or_404(Instrument, name=instrument.lower()) data_list = [] + get_extra = request.POST.get("extra", default=False) for item in DataRun.objects.filter(instrument=instrument_object): timestamp_local = timezone.localtime(item.created_on) timestamp_formatted = dateformat.DateFormat(timestamp_local).format(settings.DATETIME_FORMAT) - expiration_local = timezone.localtime(item.expiration_date) - expiration_formatted = dateformat.DateFormat(expiration_local).format(settings.DATETIME_FORMAT) - data_list.append( - dict( - id=item.id, - run_number=str(item.run_number), - run_id=item.run_id, - timestamp=item.created_on.isoformat(), - created_on=timestamp_formatted, - expiration_date=expiration_formatted, - expired=True if expiration_local < timezone.now() else False, - ) - ) - return JsonResponse(data_list, safe=False) - - -@csrf_exempt -@check_credentials -def get_all_runs(_): - """ - Get a list of all runs for all instruments/users - """ - data_list = [] - for item in DataRun.objects.all(): - timestamp_local = timezone.localtime(item.created_on) - timestamp_formatted = dateformat.DateFormat(timestamp_local).format(settings.DATETIME_FORMAT) - 
expiration_local = timezone.localtime(item.expiration_date) - expiration_formatted = dateformat.DateFormat(expiration_local).format(settings.DATETIME_FORMAT) - data_list.append( - dict( - id=item.id, - run_number=str(item.run_number), - run_id=item.run_id, - instrument=item.instrument.name, - timestamp=item.created_on.isoformat(), - created_on=timestamp_formatted, - expiration_date=expiration_formatted, - expired=True if expiration_local < timezone.now() else False, - ) + data = dict( + id=item.id, + run_number=str(item.run_number), + run_id=item.run_id, + timestamp=item.created_on.isoformat(), + created_on=timestamp_formatted, ) + if get_extra: + expiration_local = timezone.localtime(item.expiration_date) + expiration_formatted = dateformat.DateFormat(expiration_local).format(settings.DATETIME_FORMAT) + data["expiration_date"] = expiration_formatted + data["expired"] = True if expiration_local < timezone.now() else False + data_list.append(data) return JsonResponse(data_list, safe=False) diff --git a/tests/test_expiration.py b/tests/test_expiration.py index dfe01cd..abc3185 100644 --- a/tests/test_expiration.py +++ b/tests/test_expiration.py @@ -70,7 +70,7 @@ def test_expiration_plot(self, data_server): request = requests.post( f"{TEST_URL}/plots/{instrument}/list/", - data=self.user_data, + data={**self.user_data, "extra": True}, ) assert request.status_code == HTTP_OK @@ -106,7 +106,7 @@ def test_expiration_user(self, data_server): request = requests.post( f"{TEST_URL}/plots/{self.username}/list/", - data=self.user_data, + data={**self.user_data, "extra": True}, ) assert request.status_code == HTTP_OK @@ -124,7 +124,24 @@ def test_deleting_expired(self): print(output) # Ensure the above ran and worked - r = requests.post(f"{TEST_URL}/plots/get_all_runs/", data=self.user_data) - runs = r.json() - assert len(runs) == 2 - assert all(run["expired"] is False for run in runs) + conn = psycopg.connect( + dbname=os.environ.get("DATABASE_NAME"), + user=os.environ.get("DATABASE_USER"), + password=os.environ.get("DATABASE_PASS"), + port=os.environ.get("DATABASE_PORT"), + host="localhost", + ) + cur = conn.cursor() + + cur.execute("SELECT * FROM plots_datarun") + results = cur.fetchall() + print(f"Runs after purge: {len(results)}") + for i in results: + print(i) + assert len(results) == 2 + + # Plots after purge + cur.execute("SELECT * FROM plots_plotdata") + results = cur.fetchall() + print(f"Plots after purge: {len(results)}") + assert len(results) == 2
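
The end state of this series moves expiration reads and deletes off dedicated endpoints: posting `extra=True` to `/plots/{instrument}/list/` adds the `expiration_date` and `expired` fields to each entry, and expired runs are removed by the `purge_expired_data` management command rather than over HTTP. As a quick smoke test against a local compose stack, a client call might look like the sketch below — a minimal example, assuming the environment variables from the README are exported and reusing the `TEST_URL` and `TEST_INST` values from the test suite:

```python
import os

import requests

# Values mirrored from tests/test_expiration.py; adjust for a different deployment.
TEST_URL = "http://127.0.0.1"
INSTRUMENT = "TEST_INST"

user_data = {
    "username": os.environ["DJANGO_SUPERUSER_USERNAME"],
    "password": os.environ["DJANGO_SUPERUSER_PASSWORD"],
}

# Posting extra=True asks get_data_list to include expiration_date/expired per run
response = requests.post(f"{TEST_URL}/plots/{INSTRUMENT}/list/", data={**user_data, "extra": True})
response.raise_for_status()

for run in response.json():
    # Without "extra", each entry carries only id, run_number, run_id, timestamp, created_on
    print(run["run_id"], run["expiration_date"], run["expired"])
```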