diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index c5e93017..00000000 --- a/.travis.yml +++ /dev/null @@ -1,23 +0,0 @@ -sudo: required -services: - - docker -branches: - only: - - main - - production -notifications: - email: false - -matrix: - include: - - language: python - python: - - "3.8" - before_install: - - bash .travis/before_install.sh - install: - - bash .travis/install.sh - script: - - bash .travis/script.sh - after_success: - - bash .travis/after_success.sh diff --git a/.travis/before_install.sh b/.travis/before_install.sh index fe0f833c..f4b45dd3 100644 --- a/.travis/before_install.sh +++ b/.travis/before_install.sh @@ -21,36 +21,9 @@ setup_dependencies() { docker info } -# upgrade docker-compose to a more recent version so we can use it for -# testing -update_compose () { - echo "Upgrade docker-compose" - sudo rm /usr/local/bin/docker-compose - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-$(uname -s)-$(uname -m) > docker-compose - chmod +x docker-compose - sudo mv docker-compose /usr/local/bin - docker-compose --version -} - -update_docker_configuration() { - echo "INFO: - Updating docker configuration - " - - echo '{ - "experimental": true, - "storage-driver": "overlay2", - "max-concurrent-downloads": 50, - "max-concurrent-uploads": 50 -}' | sudo tee /etc/docker/daemon.json - sudo service docker restart -} - main() { stop_postgres setup_dependencies - update_compose - update_docker_configuration } main \ No newline at end of file diff --git a/LICENSE b/LICENSE index 8d81d068..7b3dd71a 100644 --- a/LICENSE +++ b/LICENSE @@ -6,22 +6,207 @@ Portions of this software are licensed as follows: * All content that resides under the "ee/" directory of this repository, if that directory exists, is licensed under the license defined in "ee/LICENSE". 
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license. * All third party components incorporated into the IllumiDesk software are licensed under the original license provided by the owner of the applicable component. -* Content outside of the above mentioned directories or restrictions above is available under the "MIT Expat" license as defined below. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +* Content outside of the above mentioned directories or restrictions above is available under the "Apache 2.0" license as defined below. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 IllumiDesk, LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Makefile b/Makefile index 3f9e4061..1ff17e19 100644 --- a/Makefile +++ b/Makefile @@ -49,3 +49,13 @@ test: dev clean: find . 
-name '*.pyc' -exec rm -f {} + rm -rf $(VENV_NAME) *.eggs *.egg-info dist build docs/_build .cache + +build-jhub: + rm illumidesk.zip || true + find ./src/ -type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete -o -type f -name .DS_Store -delete + zip -r compose/jupyterhub/illumidesk.zip src/ + docker build -t illumidesk/jupyterhub:k8s ./docker/jupyterhub + +push-jhub: + docker login + docker push illumidesk/jupyterhub:k8s \ No newline at end of file diff --git a/README.md b/README.md index 6166074c..0fa15d52 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ # IllumiDesk -:warning: Thanks to the amazing feedback we have gotten from the community, the IllumiDesk Team is currently re implementing many of the components listed in this document. For the most part these changes are and will be backwards compatible, however, please proceed with caution if you plan on using this setup in a production environment (not recommended) :warning: +This monorepo is used to maintain IllumiDesk's authenticators, spawners, and microservices. This setup assumes that all services are running with Kubernetes. ## Overview @@ -15,7 +15,7 @@ When combined with the [nbgrader](https://github.com/jupyter/nbgrader) package i ## Why? -Running a multi-user setup using [JupyterHub](https://github.com/jupyterhub/jupyterhub) and `nbgrader` with `docker containers` requires some additional setup. Some of the questions this distribution attempts to answer are: +Running a multi-user setup using [JupyterHub](https://github.com/jupyterhub/jupyterhub) and `nbgrader` with `containers` requires some additional setup. Some of the questions this distribution attempts to answer are: - How do we manage authentication when the user isn't a system user within the JupyterHub or Jupyter Notebook container? - How do we manage permissions for student and instructor folders? 
@@ -24,513 +24,16 @@ Running a multi-user setup using [JupyterHub](https://github.com/jupyterhub/jupy - How should deployment tools reflect these container-based requirements and also (to the extent possible) offer users an option that is cloud-vendor agnostic? Our goal is to remove these obstacles so that you can get on with the teaching! +## Prerequisites +Kubernetes v1.17+. ## Quick Start -Follow these instructions to install the system with a set of sensible defaults. +This setup only supports Kubernetes-based installations at this time. Refer to the [helm-chart](https://github.com/illumidesk/helm-chart) repo for installation instructions. -Refer to the [customization](#customization) section for more advanced setup options, such as enabling LTI to handle requests from your LMS. - -### Prerequisites - -On remote host: - -- Tested with Ubuntu 20.04 - -### Prepare your setup - -1. Clone and change directories into this repo's root: - -``` -git clone https://github.com/IllumiDesk/illumidesk -cd illumidesk -``` - -2. Create a new hosts file from the provided YML template. - -``` -cp ansible/hosts.example ansible/hosts -``` - -3. Update the `ansible/hosts` file: - - - ansible_host: target server IPv4 address - - ansible_port: target server port (default 22) - - ansible_user: target server username for SSH access - - ansible_ssh_private_key_file: full path to SSH private key file used for SSH - - ansible_password: optional value used when the target server requires a username/password - -Refer to the [customization](#customization) section if you would like to use LTI 1.1 or LTI 1.3 with your LMS. - -> **NOTE**: the default admin user is set to `admin`. To update this default value, change the `admin_user` variable to another username. Refere to the `hosts.example` file for example. - -4. Run the deployment script (the script will prompt you for certain values): - -```bash -make deploy -``` - -Use the `ARGS="-v"` option to deploy the stack with verbosity enabled. 
For example: - -```bash -make deploy ARGS="-v" -``` - -Set `ARGS` to `-vv` to enable more verbosity or `-vvv` for super duper verbosity. - -1. Once the ansible playbook has finished running the stack should be available at: - - `http://:8000/` - -> **Tip**: To confirm the values you will need for the `make deploy` command to successfully connect to your instance, log into your remote instance with SSH. For example, a successfull connection with the `ssh -i my-ssh-key.pem ubuntu@1.2.3.4` command means that the values map to: - -> - ansible_host: 1.2.3.4 -> - ansible_port: 22 -> - ansible_user: ubuntu -> - ansible_ssh_private_key_file: my-ssh-key.pem -> - ansible_password: (none) - -### Initial Course Setup - -By default, this setup uses the `LTI11Authenticator`. Shared grader accounts and courses are dynamically configured when logging into the system. You may also configure the `ansible-playbook` to use the `LTI13Authenticator`. - -If you would like to setup a quickstart course, [you may do so using the standard `nbgrader quickstart` command](https://nbgrader.readthedocs.io/en/stable/command_line_tools/nbgrader-quickstart.html?highlight=quickstart) (replace `` with your desired course name): - -```bash -nbgrader quickstart --force -``` - -Click on the `Grader Console` tab and follow the [steps available within the IllumiDesk docs](https://docs.illumidesk.com) to generate and release assignments for your learners. - -> **NOTE**: It is important to note that by default the grader's configuration file, which is located in the `/home/grader-{course_id}/{course_id}` path defines the use of the `header.ipnyb` and `footer.ipynb` template files located within the course's source directory. If you receive an error when generating the assignment, then add `header.ipynb` and `footer.ipynb` (with or without content is fine, as long as the file is a Jupyter Notebook (ipynb) compatible format) files to within the `/home/grader-{course_id}/{course_id}/src` path. 
Updating the configration file for the grader's notebook would require you to restart the Notebook, which is currently not possible due to the fact that the shared grader notebook runs as a service. This setup therefore defaults search for these template files for those that need them. - -## Components - -* **JupyterHub**: Runs [JupyterHub](https://jupyterhub.readthedocs.org/en/latest/getting-started.html#overview) within a Docker container running as root. - -* **Setup Course Image**: Runs client services to communicate with the `JupyterHub REST API` to dynamically setup new courses. This service is only applicable when using either `LTI 1.1` or `LTI 1.3` authenticators. - -* **Authenticator**: The JupyterHub compatible authentication service. We recommend either using the `LTI11Authenticator` or `LTI13Authenticator` with your Learning Management System to take advantage of the latest features. - -* **Spawner**: Spawning service to manage user notebooks. This setup uses one class which inherits from the [DockerSpawner](https://github.com/jupyterhub/dockerspawner) class: the `IllumiDeskDockerSpawner` to set the user's docker image based on LTI role. - -* **Data Directories**: This repo uses `docker-compose` to start all services and data volumes for JupyterHub, notebook directories, databases, and the `nbgrader exchange` directory using mounts from the host's file system. - -* **Databases**: This setup replaces the default SQLite databases with standard Postgres databases running in their own containers. (You may use Postgres DB's running in other locations by updating connections strings and configuration values). The databases are used for the JupyterHub application, the Postgres laboratory environments that need access to database(s) for labs, assignments, etc., and a database for the Nbgrader application. - -* **Network**: An external bridge network named `jupyter-network` is used by default. 
The grader service and the user notebooks are attached to this network. - -* **Workspaces**: User servers are set and launched based on either the user's LTI compatible role (student/learner group or instructor group) or by specifying the ?next=/user-redirect/ as a query parameter that identifies the workspace type by path, for example: next=/user-redirect/theia for the Theia IDE or next=/user-redirect/vscode for VS Code IDE. - -* **Shared drive**: A shared folder that may be used to share content among users within a course. Users with access to the course's shared grader notebook (Instructors and TAs) have read/write access to the files located in the shared folder. Users accessing their own workspaces have access to the files in the shared grader notebook with read-only access. - -## Customization - -You may customize your setup by customizing additional variables in the `hosts` file. For example, you can run the `make deploy` command to set your own organization name and top level domain when using this setup behind a reverse-proxy with TLS termination. - -> **NOTE**: You may add any of the variables listed in `ansible/group_vars/all.yml` within your `hosts` file before running the `make deploy` command. - -## General JupyterHub Settings - -Most settings located in the JupyterHub configuration file (`jupyterhub_config.py`) have a set of opinionated defaults that can be changed by updating the configuration file itself. However there are two configuration options available that can be changed with the `hosts` file: - -- `admin_user`: the username that has access to the `Admin` section of the JupyterHub home page. This allows users to view running servers, stop/start the user servers, among other tasks. -- `shutdown_on_logout`: when `True` (default), the user's server is shut down when tht user logs out of their session. 
- -### LTI 1.3 Authenticator - -> **New in Version 0.6.0**: this setup supports user authentication with the [LTI 1.3 Core Specification](http://www.imsglobal.org/spec/lti/v1p3/) as of version 0.6.0. LTI 1.3 is built on top of OAuth2 and OIDC and therefore provides additional security features when compared to [LTI 1.1](https://www.imsglobal.org/specs/ltiv1p1). - -To enable LTI 1.3, update your ansible `hosts` configuration so that `authentication_type` is set to `lti13`. Then, add the platform (usually an LMS) endpoints required to establish a trust relationship between the tool and the platform. These additional endpoints are provided by the `lti13_private_key`, `lti13_endpoint`, `lti13_token_url`, and `lti13_authorize_url`. You also need to specify the `lti13_client_id` used by the platform to associate the tool with an OIDC compatible client id. The [`hosts.example`](./ansible/hosts.example) has some example endpoints. Keep in mind however, that unlike LTI 1.1 (instructions below), LTI 1.3's trust relationsip between the platform and the tool is explicit. - -Please refer to [the user guide documentation](https://app.gitbook.com/@illumidesk/s/guides/installation-and-configuration/learning-tools-interoperability-lti-1.3) if you need instructions on how to configure the tool using the LTI 1.3 option with specific LMS vendors. - -### LTI 1.1 Authenticator - -> **New in Version 0.2.0**: with LTI 1.1 enabled, courses and user membership are automatically set for you based on the information located within the LTI 1.1 launch request. This feature allows you to dynamically support multiple classes with multiple teacher/learner memberships from the same deployment without haveing to update the configuration files. - -To launch the stack with LTI 1.1 enabled simply change the `authentication_type` variable in your hosts file to `lti11`. -By default both the `consumer key` and `shared secret` are created for you. 
If you would like to add your own -values then assign them to the `lti11_consumer_key` and `lti11_shared_secret` variables in the `hosts` file. - -Then, rerun the `make deploy` copmmand to update your stack's settings. - -### Postgres for Lab Environments - -> **New in Version 0.5.0**: users that wish to provide their Learners with a shared Postgres container my do so by setting the `postgres_labs_enabled` to true. - -With the Postgres container enabled, users (both students and instructors) can connect to a shared Postgres database from within their Jupyter Notebooks by opening a connection with the standard `psycop2g` depency using the `postgres-labs` host name. IllumiDesk's [user guides provide additional examples](https://docs.illumidesk.com) on the commands and common use-cases available for this option. - -### Shared Folder - -With shared_folder_enabled set to true, users with access to the shared grader service (by default Instructors and TAs) may create files directly in the course's /shared folder. Since one shared grader notebook is launched for each course then all the files created in the /shared folder appear within the /shared/ in all end-user workspaces. - -### Additional Workspace Types - -Additional workspace types are supported by any workspace type that is supported by the underlying [jupyter-server-proxy] package. This stack has been tested with a variety of workspace types including: - -#### IDEs - -- Theia -- RStudio -- VS Code (code-server) - -#### Data Tools - -- OpenRefine - -#### Visualization/Dashboard Servers - -- Plotly Dash -- Streamlit -- Bokeh Server -- Jupyter Voila - -When you want to use a specific workspace type simply leverage the existing [user redirect](https://jupyterhub.readthedocs.io/en/stable/reference/urls.html#user-redirect) functionality available with JupyterHub combined with the query parameter next. 
For example, with LTI 1.1 the launch url would look like so: - -``` -https://my.example.com/hub/lti/launch?next=/user-redirect/theia -``` - -Similar to how users can toggle between /tree and /lab for Jupyter Classic and JupyterLab, respectively, the user may set other workspace types with recognized paths that point to specific workspace types. There is no restriction on what path to use, in so long as the jupyter-server-proxy implementation available with the end-user workspace [has that path defined as an entrypoint](https://jupyter-server-proxy.readthedocs.io/en/latest/server-process.html#server-process-options). - -Various LMS's also support adding custom key/values to include with the launch request. For example, the Canvas LMS has the `Custom Fields` text box and Open edX has the `Custom Parameters` text box to support additional key/values to include with the launch request. - -These query parameters do not conflict with the `git clone/merge` feature when launching workspaces. It is common to use both options when launching workspaces. This allows instructors to build labs that clone/merge git-based sources and may spawn specific and optimized workspace environments. - -**Open edX**: - -``` -["next=/user-redirect/theia", "another_custom_param=abc"] -``` - -**Canvas LMS**: - -``` -next=/user-redirect/vscode -another_custom_param=abc -``` - -### Defining Launch Requests to Clone / Merge Git-based Repos - -Instructors and content creators in many cases have their content version controlled with git-based source control solutions, such as GitHub, GitLab, or BitBucket. This setup includes the [`nbgitpuller`](https://pypi.org/project/nbgitpuller/) package and handles LTI launch requests to clone and merge source files from an upstream git-based repository. - -This functionality is decoupled from the authenticator, therefore, the options are added as query parameters when sending the POST request to the hub. 
Below are the definition setting and an example of a full launch request URL using LTI 1.1: - -- repo: the repositories full URL link -- branch: the git branch you would like users to clone from -- subPath: folder and path name for the file you would like your users to open when first launching the URL -- app: one of `notebook` for Classic Notebook, `lab` for JupyerLab, `theia` for THEIA IDE. - -For example, if your values are: - -- IllumiDesk launch request URL: https://acme.illumidesk.com/lti/launch -- repo: https://github.com/acme/intro-to-python -- branch: master -- subPath: 00_intro_00_content.ipynb -- app: notebook - -Then the full launch request URL would look like so: - -``` -https://acme.illumidesk.com/lti/launch?next=/user-redirect/git-pull?repo=https%3A%2F%2Fgithub.com%2Facme%2Fintro-to-python&branch=master&subPath=00_intro_00_content.ipynb&app=notebook -``` - -### Configuration Files - -The configuration changes depending on how you decide to update this setup. Essentially customizations boil down to: - -1. JupyterHub configuration using `jupyterhub_config.py`: - - - Authenticators - - Spawners - - Services - -> **Note**: By default the `jupyterhub_config.py` file is located in `/etc/jupyter/jupyterhub_config.py` within the running JupyterHub container, however, if you change this location (which would require an update to the JupyterHub's Dockerfile) then you need to make sure you are using the correct configuration file with the `jupyterhub -f /path/to/jupyterhub_config.py` option. - -Whenever possible we try to adhere to [JupyterHub's](https://jupyterhub.readthedocs.io/en/stable/installation-basics.html#folders-and-file-locations) recommended paths: - -- `/srv/jupyterhub` for all security and runtime files -- `/etc/jupyterhub` for all configuration files -- `/var/log` for log files - -2. Nbgrader configurations using `nbgrader_config.py`. 
- -Three `nbgrader_config.py` files should exist: - -**Grader Account** - -* **Grader's home: `/home/grader-{course_id}/.jupyter/nbgrader_config.py`**: defines how `nbgrader` authenticates with a third party service. This setup uses the `JupyterHubAuthPlugin`, the log file's location, and the `course_id` the grader account manages. -* **Grader's course: `/home/grader-{course_id}/{course_id}/nbgrader_config.py`**: configurations related to how the course files themselves are managed, such as solution delimeters, code stubs, etc. - -> **NOTE**: the grader's course confuguration has default values for `header.ipynb` and `footer.ipynb` template notebooks. These templates are added to the generated notebook, if present. These files should be located in the course's `source` sub-directory. The full paths for the header/footer templates are `/home/grader-{course_id}/{course_id}/source/header.ipynb` and `/home/grader-{course_id}/{course_id}/source/footer.ipynb`. - -**Instructor/Learner Account** - -* **Instructor/Learner settings `/etc/jupyter/nbgrader_config.py`**: defines how `nbgrader` authenticates with a third party service, such as `JupyterHub` using the `JupyterHubAuthPlugin`, the log file's location, etc. Instructor and learner accounts do **NOT** contain the `course_id` identifier in their nbgrader configuration files. - -3. Jupyter Notebook configuration using `jupyter_notebook_config.py`. This configuration is standard fare and unless required does not need customized edits. - -4. For this setup, the deployment configuration is defined primarily with `docker-compose.yml`. - -5. Cloud specific setup options by specifying settings in the `hosts` file. For now, these options are specific to `AWS EFS` mounts. This allows administrators to leverage AWS's EFS service for additional data redundancy, security, and sharing options. 
Shared file systems are particularly helpful when using a setup with multiple hosts such as with Docker Swarm or Kubernetes since the user's container may launch on any available virtual machine (host). To enable and use EFS, update the following `hosts` file variables: - -- **aws_efs_enabled (Required)**: set to true to enable mounts with AWS EFS, defaults to `false`. -- **aws_region (Required)**: the AWS region where the EFS service is running, defaults to `us-west-2`. -- **efs_id (Required)**: and existing AWS EFS identifier, for example `fs-0726eyyd`. Defaults to an empty string. -- **mnt_root (Recommended)**: if you test without NFS-based mounts and then mount an existing folder to an NFS-based shared directory, then you run the risk of losing your files. Change this value to use a folder other than the default `/mnt` directory to either another directory or a sub-directory within the `/mnt` directory, such as `/mnt/efs/fs1`. - -### Build the Stack - -The following docker images are created/pulled with this setup: - -- JupyterHub image -- Postgres image -- Reverse-proxy image -- Jupyter Notebook Student image -- Jupyter Notebook Instructor image -- Jupyter Notebook shared Grader image - -When building the images the configuration files are copied to the image from the host using the `COPY` command. Environment variables are stored in `env.*` files. You can either customize the environment variables within the `env.*` files or add new ones as needed. The `env.*` files are used by docker-compose to reduce the file's verbosity. - -### Spawners - -By default this setup includes the `IllumiDeskDockerSpawner` class. However, you should be able to use any container based spawner. This implementation utilizes the `auth_state_hook` to get the user's authentication dictionary, and based on the spawner class sets the docker image to spawn based on the `user_role` key with the spawner's `auth_state_hook`. 
The `pre_spawn_hook` to add user directories with the appropriate permissions, since users are not added to the operating system. - -**Note**: the user is redirected to their server by default with `JupyterHub.redirect_to_server = True`. - -#### General Spawner Settings - -Most settings located in the JupyterHub configuration file (`jupyterhub_config.py`) have a set of opinionated defaults that can be changed by updating the configuration file itself. However there are two configuration options available that can be changed with the `hosts` file: - -- `mem_limit`: [limit the amount of memory](https://jupyterhub.readthedocs.io/en/stable/reference/spawners.html#memory-limits-guarantees) (RAM) available for end-user containers. For example, entering `2G` will limit the user to two (2) gigabytes of RAM. -- `cpu_limit`: [limit access](https://jupyterhub.readthedocs.io/en/stable/reference/spawners.html#cpu-limits-guarantees) to available virtual CPUs. For example, entering `0.5` will limit the user to 50% of one of the available virtual CPUs in the system. - -#### IllumiDeskDockerSpawner - -The `IllumiDeskDockerSpawner` interprets LTI-based roles to determine which container to launch based on the user's role. If used with `nbgrader`, this class provides users with a container prepared for students to fetch and submit assignment and instructors with access the shared grader service for each course. - -Edit the `JupyterHub.spawner_class` to update the spawner used by JupyterHub when launching user containers. For example, if you are changing the spawner from `DockerSpawner` to `KubeSpawner`: - -Before: - -```python -c.JupyterHub.spawner_class = 'dockerspawner.IllumiDeskDockerSpawner' -``` - -After: - -```python -c.JupyterHub.spawner_class = 'kubespawner.KubeSpawner' -``` - -As mentioned in the [authenticator](#authenticator) section, make sure you refer to the spawner's documentation to consider all settings before launching JupyterHub. 
In most cases the spawners provide drop-in replacement of the provided `IllumiDeskDockerSpawner` class. However, setting spawners other than `IllumiDeskDockerSpawner` may break compatibility with the grading services. - -### Proxies - -There are two types of proxies that work with JupyterHub: - -- JupyterHub's proxy -- Externally managed reverse-proxy - -JupyterHub's proxy manages routing and optionally TSL termination. Reverse-proxies help manage multiple services with one domain, including JupyterHub. - -This setup uses JupyterHub's [configurable-http-proxy]((https://github.com/jupyterhub/configurable-http-proxy)) running in a separate container which enables JupyterHub restarts without interrupting active sessions between end-users and their Jupyter Notebooks. - -> **Warning**: CHP is **not** setup with TSL. Refer to [CHP's official documentation](https://github.com/jupyterhub/configurable-http-proxy) to set up TSL termination. - -### Jupyter Notebook Images - -**Requirements** - -- The Jupyter Notebook image needs to have `JupyterHub` installed and this version of JupyterHub **must coincide with the version of JupyterHub that is spawing the Jupyter Notebook**. By default the `illumidesk/docker-stacks` images have JupyterHub installed. -- Use one of images provided by the [`jupyter/docker-stacks`](https://github.com/jupyter/docker-stacks). -- Make sure the image is on the host used by the spawner to launch the user's Jupyter Notebook. - -Images are pulled from `DockerHub`, including the end-user base image. This base image comes pre installed with a script to enable Jupyter Notebook / Jupyter Lab extensions based on the user's LTI role. 
- -The nbgrader extensions are enabled within the images like so: - -| | Students | Instructors | Formgraders | -|---|---|---|---| -| Create Assignment | no | no | yes | -| Assignment List | yes | yes | no | -| Formgrader | no | no | yes | -| Course List | no | yes | no | - -Refer to [this section](https://nbgrader.readthedocs.io/en/stable/user_guide/installation.html#installing-and-activating-extensions) of the `nbgrader` docs for more information on how you can enable and disable specific extensions. - -### Grading with Multiple Instructors - -As of `nbgrader 0.6.0`, nbgrader supports the [JupyterHubAuthPlugin](https://nbgrader.readthedocs.io/en/stable/configuration/jupyterhub_config.html#jupyterhub-authentication) to determine the user's membership within a course. The section that describes how to run [nbgrader with JupyterHub] is well written. However for the sake of clarity, some of the key points and examples are written below. - -The following rules are defined to determine access to nbgrader features: - -- Users with the student role are members of the `nbgrader-{course_id}` group(s). Students are shown assignments only for course(s) with `{course_id}`. -- Users with the instructor role are members of the `formgrade-{course_id}` group(s). Instructors are shown links to course(s) to access `{course_id}`. To access the formgrader, instructors access to the `{course_id}` service (essentially a shared notebook) and authenticate to the `{course_id}` service using JupyterHub as an OAuth2 server. - -> **NOTE** It's important to emphasize that **instructors do not grade assignments with their own notebook server** but with a **shared notebook** which runs as a JupyterHub service and which is owned by the shared `grader-{course_id}` account. - -The configuration for this setup is located in four locations. 
The first deals with the grader notebook as an [externally managed JupyterHub service](https://jupyterhub.readthedocs.io/en/stable/getting-started/services-basics.html), and the other three deal with the location and settings for `nbgrader_config.py`. - -The examples below use the `course101` as the course name (known as `context_label` in LTI terms): - -1. Within `jupyterhub_config.py` which defines a service: - -- Name -- Access by group -- Ownership -- API token -- URL -- Command - -For example: - -```python -c.JupyterHub.services = [ - { - 'name': 'course101', - 'url': 'http://127.0.0.1:9999', - 'command': [ - 'jupyterhub-singleuser', - '--group=formgrade-course101', - '--debug', - ], - 'user': 'grader-course101', - 'cwd': '/home/grader-course101', - 'api_token': 'api_token_course101' - }] -``` - -2. The global `nbgrader_config.py` used by all roles, located in `/etc/jupyter/nbgrader_config.py` which defines: - -- Authenticator plugin class -- Exchange directory location - -For example: - -```python -c.Exchange.path_includes_course = True -c.Exchange.root = '/srv/nbgrader/exchange' -c.Authenticator.plugin_class = JupyterHubAuthPlugin -``` - -3. The `nbgrader_config.py` located within the shared grader account home directory: (`/home/grader-course101/.jupyter/nbgrader_config.py`) which defines: - -- Course root path -- Course name - -For example: - -```python -c.CourseDirectory.root = '/home/grader-course101/course101' -c.CourseDirectory.course_id = 'course101' -``` - -4. The `nbgrader_config.py` located within the course directory: (`/home/grader-course101/course101/nbgrader_config.py`) which defines: - -- The course_id -- Nbgrader application options - -For example: - -```python -c.CourseDirectory.course_id = 'course101' -c.ClearSolutions.text_stub = 'ADD YOUR ANSWER HERE' -``` - -### Some Notes on Authentication, User Directories, and Local System Users - -The examples provided in this repo assume that users are **not** local system users. 
A custom -`Spawner.pre_spawn_hook` is used to create user directories before spawing their notebook. - -## Environment Variables - -The services included with this setup rely on environment variables to work properly. You can override them by either setting the ansible veriable when running the playbook or my manually modifying the environment variables on the `env.*` host files after the playbook has run. - - - -### Environment Variables for all services `env.common` - -| Variable | Type | Description | Default Value | -|---|---|---|---| -| DOCKER_NETWORK_NAME | `string` | Docker image used by users without an assigned role. | `illumidesk/illumidesk-notebook:latest` | -| ILLUMIDESK_DIR | `string` | IllumiDesk directory with source files. | `$HOME/illumidesk_deployment` | -| JUPYTERHUB_API_TOKEN | `string` | Docker network name for docker-compose and dockerspawner | `jupyter-network` | -| JUPYTERHUB_API_URL | `string` | Internal API URL corresponding to JupyterHub. | `http://jupyterhub:8081` | -| JUPYTERHUB_BASE_URL | `string` | Working directory for Jupyter Notebooks | `/home/jovyan` | -| JUPYTERHUB_CONFIG_PATH | `string` | Notebook grader user | `/srv/jupyterhub` | -| EXCHANGE_DIR | `string` | Exchange directory path | `/srv/nbgrader/exchange` | -| MNT_ROOT | `string` | Host directory root | `/mnt` | -| NB_NON_GRADER_UID | `string` | Host user directory UID | `1000` | -| NB_GID | `string` | Host user directory GID | `100` | -| ORGANIZATION_NAME | `string` | Organization name. 
| `my-edu` | -| PGDATA | `string` | Postgres data file path | `/var/lib/postgresql/data` | -| POSTGRES_NBGRADER_DB | `string` | Postgres database name | `nbgrader` | -| POSTGRES_NBGRADER_HOST | `string` | Postgres host for Nbgrader | `nbgrader-db` | -| POSTGRES_NBGRADER_PASSWORD | `string` | Postgres password for Nbgrader | `nbgrader` | -| POSTGRES_NBGRADER_PORT | `string` | Postgres port for Nbgrader | `5432` | -| POSTGRES_NBGRADER_USER | `string` | Postgres username for Nbgrader | `nbgrader` | -| SHARED_FOLDER_ENABLED | `string` | Specifies the use of shared folder (between grader and student notebooks) | `True` | - -### Environment Variables pertaining to JupyterHub, located in `env.jhub` - -| Variable | Type | Description | Default Value | -|---|---|---|---| -| DOCKER_END_USER_IMAGE | `string` | Docker image used by users without an assigned role. | `illumidesk/illumidesk-notebook:latest` | -| DOCKER_NOTEBOOK_DIR | `string` | Working directory for Jupyter Notebooks | `/home/jovyan` | -| JUPYTERHUB_ADMIN_USER | `string` | JupyterHub admin user | `admin` | -| JUPYTERHUB_CRYPT_KEY | `string` | Cyptographic key used to encrypt cookies. | `` | -| JUPYTERHUB_API_URL | `string` | Internal API URL corresponding to JupyterHub. 
| `http://jupyterhub:8081` | -| JUPYTERHUB_SHUTDOWN_ON_LOGOUT | `string` | Shut down the user's server when logging out | `True` | -| LTI11_CONSUMER_KEY | `string` | LTI 1.1 consumer key | `ild_test_consumer_key` | -| LTI11_SHARED_SECRET | `string` | LTI 1.1 shared secret | `ild_test_shared_secret` | -| LTI13_AUTHORIZE_URL | `string` | LTI 1.3 authorization URL, such as `https://my.platform.domain/api/lti/authorize_redirect` | `''` | -| LTI13_CLIENT_ID | `string` | LTI 1.3 client id used to identify the tool's installation within the platform, such as `125900000000000071` | `''` | -| LTI13_ENDPOINT | `string` | LTI 1.3 platform endpoint, such as `https://my.platform.domain/api/lti/security/jwks` | `''` | -| LTI13_PRIVATE_KEY | `string` | Private key used with LTI 1.3 to create public JSON Web Keys (JWK) | `/secrets/keys/rsa_private.pem` | -| LTI13_TOKEN_URL | `string` | LTI 1.3 token URL surfaced by the platform, such as `https://my.platform.domain/login/oauth2/token` | `''` | -| NB_NON_GRADER_UID | `string` | Host user directory UID | `1000` | -| NB_GID | `string` | Host user directory GID | `100` | -| MNT_ROOT | `string` | Host directory root | `/mnt` | -| ORGANIZATION_NAME | `string` | Organization name. 
| `my-org` | -| POSTGRES_JUPYTERHUB_DB | `string` | Postgres database name | `illumidesk` | -| POSTGRES_JUPYTERHUB_HOST | `string` | Postgres host | `jupyterhub-db` | -| POSTGRES_JUPYTERHUB_PASSWORD | `string` | Postgres database password | `illumidesk` | -| POSTGRES_JUPYTERHUB_PORT | `string` | Postgres database port | `5432` | -| POSTGRES_JUPYTERHUB_USER | `string` | Postgres database username | `illumidesk` | -| SHARED_FOLDER_ENABLED | `string` | Specifies the use of shared folder (between grader and student notebooks) | `True` | -| SPAWNER_MEM_LIMIT | `string` | Spawner memory limit | `2G` | -| SPAWNER_CPU_LIMIT | `string` | Spawner cpu limit | `0.5` | - -### Environment Variables pertaining to setup-course service, located in `env.setup-course` - -| Variable | Type | Description | Default Value | -|---|---|---|---| -| DOCKER_NETWORK_NAME | `string` | JupyterHub API token | `jupyter-network` | -| DOCKER_GRADER_IMAGE | `string` | Docker image used for the grader services | `illumidesk/grader-notebook:latest` | -| JUPYTERHUB_API_URL | `string` | JupyterHub client id used with OAuth2 | `http://reverse-proxy:8000/hub/api` | -| JUPYTERHUB_SERVICE_NAME | `string` | Notebook grader user id | `jupyterhub` | -| MNT_ROOT | `string` | Notebook grader user id | `/mnt` | -| NB_GRADER_UID | `string` | Notebook grader user id | `10001` | -| NB_GID | `string` | Notebook grader user id | `100` | -| SHARED_FOLDER_ENABLED | `string` | Specifies the use of shared folder (between grader and student notebooks) | `True` | - ---- - -## Resources - -### Documentation - -- [JupyterHub documentation](https://jupyterhub.readthedocs.io/en/stable/) -- [JupyterHub API](https://jupyterhub.readthedocs.io/en/stable/api/) -- [Nbgrader documentation](https://nbgrader.readthedocs.io/en/stable/) - -### Sources of Inspiration - -- [jupyterhub-deploy-docker](https://github.com/jupyterhub/jupyterhub-deploy-docker) -- [jupyterhub-docker](https://github.com/defeo/jupyterhub-docker) +## Development 
Installation +Refer to the [contributing](./CONTRIBUTING.md) guide located in the root of this repo. ### General Guidelines This project enforces the [Contributor Covenant](./CODE_OF_CONDUCT.md). Be kind and build a nice open source community with us. diff --git a/ansible/ansible.cfg b/ansible/ansible.cfg deleted file mode 100644 index 7eb02786..00000000 --- a/ansible/ansible.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[defaults] -inventory = ./hosts -host_key_checking = False -roles_path = roles \ No newline at end of file diff --git a/ansible/group_vars/all.yml b/ansible/group_vars/all.yml deleted file mode 100644 index dc8cc806..00000000 --- a/ansible/group_vars/all.yml +++ /dev/null @@ -1,106 +0,0 @@ ---- -# Set as pilot environment to enable announcements on all pages stating as such -pilot_environment: "{{ pilot_environment_param | default('true')}}" - -# Set as pilot environment to enable announcements on all pages stating as such -pilot_announcement_text: "{{ pilot_environment_text_param | default('WARNING: This is a demo environment. Contact hello@illumidesk.com if you would like access to the Production system. 
Thanks!')}}" - -# Ansible working directory on remote host -working_dir: "{{ ansible_env.PWD }}/illumidesk_deployment" - -# End-user account info -org_name: "{{ org_name_param | default('my') }}" - -## Docker -# base jupyterhub image -docker_jhub_base_image: "{{ docker_jhub_base_image_param | default('illumidesk/jupyterhub:py3.8') }}" - -# built jupyterhub image -docker_illumidesk_jhub_image: "{{ docker_illumidesk_jhub_image_param | default('illumidesk/jupyterhub:latest') }}" - -# illumidesk notebooks base image -docker_illumidesk_end_user_image: "{{ docker_illumidesk_end_user_image_param | default('illumidesk/illumidesk-notebook:latest') }}" - -# illumidesk grader notebook image -docker_illumidesk_grader_image: "{{ docker_illumidesk_grader_image_param | default('illumidesk/grader-notebook:latest') }}" - -# setup-course image -docker_setup_course_image: "{{ docker_setup_course_image_param | default('illumidesk/setup-course:latest') }}" - -# postgres image -docker_postgres_image: "{{ docker_postgres_image_param | default('postgres:11.6-alpine') }}" - -# dockerfiles -docker_jupyterhub_dockerfile: "{{ docker_jupyterhub_dockerfile_param | default('Dockerfile.jhub') }}" -docker_setup_course_dockerfile: "{{ docker_setup_course_dockerfile_param | default('Dockerfile.setup-course') }}" - -## Jupyterhub -# Admin users -admin_user: "{{ admin_user_param | default('admin') }}" - -# Shutdown on logout -shutdown_on_logout: "{{ shutdown_on_logout_param | default('True') }}" - -# Base URL. Add the / character before the value, for example /acme. 
-base_url: "{{ base_url_param | default('') }}" - -# Mount directories -mnt_root: "{{ mnt_root_param | default('/mnt') }}" - -# Mount with AWS EFS -aws_efs_enabled: "{{ aws_efs_enabled_param | default('false') }}" - -# Specify the EFS id -efs_id: "{{ efs_id_param | default('') }}" - -# Specify the AWS region (used for EFS mounts) -aws_region: "{{ aws_region_param | default('us-west-2') }}" - -# Postgres for notebooks labs -postgres_labs_enabled: "{{ postgres_labs_enabled_param | default('false') }}" - -# Specify the authentication type to install/use in the deployment -# types are: lti11 or lti13 -authentication_type: "{{ authentication_type_param | default('lti11') }}" - -# LTI 1.1 credentials -lti11_consumer_key: "{{ lti11_consumer_key_result | default('ild_test_consumer_key')}}" -lti11_shared_secret: "{{ lti11_shared_secret_result | default('ild_test_shared_secret')}}" - -# LTI 1.3 config -lti13_client_id: "{{ lti13_client_id_param | default('')}}" -lti13_private_key: "{{ lti13_private_key_param | default('/secrets/keys/rsa_private.pem')}}" -lti13_endpoint: "{{ lti13_endpoint_param | default('https://illumidesk.instructure.com')}}" -lti13_token_url: "{{ lti13_token_url_param | default('https://illumidesk.instructure.com/login/oauth2/token')}}" -lti13_authorize_url: "{{ lti13_authorize_url_param | default('https://illumidesk.instructure.com/api/lti/authorize_redirect')}}" - -# shared drive -shared_folder_enabled: "{{ shared_folder_enabled_param | default('true') }}" - -# spawner -spawner_mem_limit: "{{ spawner_mem_limit_param | default('2G') }}" -spawner_cpu_limit: "{{ spawner_cpu_limit_param | default('0.5') }}" - -# postgres data directory -pg_data: "{{ pg_data_param | default('/var/lib/postgresql/data') }}" - -# Postgres labs -postgres_labs_host: "{{ postgres_labs_host_param | default('labs-db') }}" -postgres_labs_user: "{{ postgres_labs_user_param | default('postgres') }}" -postgres_labs_password: "{{ postgres_labs_password_param | default('postgres') }}" 
-postgres_labs_port: "{{ postgres_labs_port_param | default('5432') }}" -postgres_labs_dbname: "{{ postgres_labs_dbname_param | default('postgres') }}" - -# Postgres settings for NBgrader -postgres_nbgrader_host: "{{ postgres_nbgrader_host_param | default('nbgrader-db') }}" -postgres_nbgrader_user: "{{ postgres_nbgrader_user_param | default('nbgrader') }}" -postgres_nbgrader_password: "{{ postgres_nbgrader_password_param | default('nbgrader') }}" -postgres_nbgrader_port: "{{ postgres_nbgrader_port_param | default('5432') }}" -postgres_nbgrader_dbname: "{{ postgres_nbgrader_dbname_param | default('postgress') }}" - -# Postgres settings for JupyterHub -postgres_jupyterhub_host: "{{ postgres_jupyterhub_host_param | default('jupyterhub-db') }}" -postgres_jupyterhub_user: "{{ postgres_jupyterhub_user_param | default('illumidesk') }}" -postgres_jupyterhub_password: "{{ postgres_jupyterhub_password_param | default('illumidesk') }}" -postgres_jupyterhub_port: "{{ postgres_jupyterhub_port_param | default('5432') }}" -postgres_jupyterhub_dbname: "{{ postgres_jupyterhub_dbname_param | default('illumidesk') }}" diff --git a/ansible/hosts.example b/ansible/hosts.example deleted file mode 100644 index 9b8a8432..00000000 --- a/ansible/hosts.example +++ /dev/null @@ -1,128 +0,0 @@ -all: - hosts: - illumidesk: - ## Required settings - #------------------- - # IPv4 host address - ansible_host: 127.0.0.1 - - # IPv4 host address - ansible_port: 22 - - # IPv4 host address - ansible_user: root - - # IPv4 host address - ansible_ssh_private_key_file: /path/to/my/key/file - - # Explicitly set the python interpreter to python3 - ansible_python_interpreter: /usr/bin/python3 - - # uncomment and add a password only if your remote instance uses userid/password - # for SSH authentication - # ansible_password: my-secret-password - - ## (Optional) Endpoint/namespace settings - #------------------- - - # uncomment and set to false to remove announcements designated for Pilot environments - # 
pilot_environment: true - - # uncomment and update Pilot environment announcement text, defaults to text below - # pilot_announcement_text: "WARNING: This is a demo environment. Contact hello@illumidesk.com if you would like access to the Production system. Thanks!" - - # uncomment to add your organization name, defaults to my-org. - # org_name: my-org - - # uncomment to add top level domain, such as example.com - # tld: example.com - - # uncomment to use a custom base url starting with the / character, - # defaults to an empty string. - # base_url: /path - - # uncomment to start a container within the docker-compose network which allows - # users to connect to a shared postgres container (useful for lab environments) - # postgres_labs_enabled: true - - ## NFS/EFS settings - # uncomment and set to true to enable mounts with aws efs, defaults to false - # aws_efs_enabled: true - - # uncomment and specify the efs id for your AWS EFS service, defaults to an empty string - # efs_id: - - # uncomment and confirm your aws region, defaults to us-west-2 - # aws_region: us-west-2 - - # uncomment and set mount directory a subdirectory within the /mnt directory, such as - # /mnt/efs/fs1, to reduce the risk of overwriting content that may already exist in /mnt - # mnt_root: /mnt/efs/fs1 - - # set to true to enable the shared drive option - shared_folder_enabled: true - - ### Genaral Options - #------------------- - # provide a string of that represents one admin user. Username should represent - # the normalized username once logged into the system. - # admin_user: foo - - # uncomment and change the value to False if you would like to leave the user's server - # (container) in the running state after the user logs out of their session. This also applies - # to when the cookie-based session expires (14 days by default). - # shutdown_on_logout: True - - ## Authentication settings - #------------------- - - # set the authentication type to use. 
types supported are: lti11 or lti13 - - authentication_type: lti11 - - ### LTI 1.1 - #------------------- - - # lti 1.1 consumer key, by default this is set dynamically. add - # own value to override setting. we recommend using the openssl - # command to create secure strings: e.g. `openssl rand -hex 16` - # lti11_consumer_key: - - # lti 1.1 shared secret, by default set dynamically. set to at - # least 32 random bytes: e.g. `openssl rand -hex 32` - # lti11_shared_secret: - - ### LTI 1.3 - #------------------- - - # private key path and file name (pem) - # lti13_private_key: /secrets/keys/rsa_private.pem - - # the client id which represents the tools installation on the platform. the - # example below is an example clint id displayed by the details column in developer - # keys from the canvas lms. - # lti13_client_id: 125900000000000001 - - # lti 1.3 urls/endpoints, the ones provided below are example endpoints - # used with the canvas lms. - # lti13_private_key: /secrets/keys/rsa_private.pem - # lti13_endpoint: https://illumidesk.instructure.com/api/lti/security/jwks - # lti13_token_url: https://illumidesk.instructure.com/login/oauth2/token - # lti13_authorize_url: https://illumidesk.instructure.com/api/lti/authorize_redirect - - ## Spawner - # uncomment and add your preferred memory limit settings for user workspaces - # spawner_mem_limit: 2G - - # uncomment and add your preferred cpu limit settings for user workspaces - # spawner_cpu_limit: 0.5 - - ## NBGrader Database settings - #------------------- - # Uncomment and change the values below as needed. The values below reflect the defaults as - # commented in the docs. 
- # postgres_nbgrader_dbname: nbgrader - # postgres_nbgrader_host: postgres-nbgrader - # postgres_nbgrader_port: nbgrader - # postgres_nbgrader_user: nbgrader - # postgres_nbgrader_password: nbgrader diff --git a/ansible/provisioning.yml b/ansible/provisioning.yml deleted file mode 100644 index 0b9c5bf8..00000000 --- a/ansible/provisioning.yml +++ /dev/null @@ -1,56 +0,0 @@ ---- - # play 1 - - name: Common - hosts: illumidesk - become: true - gather_facts: true - roles: - - { role: common, tags: common } - - # play 2 - - name: IllumiDesk - hosts: illumidesk - become: true - gather_facts: false - roles: - - { role: illumidesk, tags: illumidesk } - - # play 3 - - name: JupyterHub - hosts: illumidesk - become: true - gather_facts: false - roles: - - { role: jupyterhub, tags: jupyterhub } - - # play 4 - - name: User workspaces - hosts: illumidesk - become: true - gather_facts: false - roles: - - { role: workspaces, tags: workspaces } - - # play 5 - - name: Setup-Course app - hosts: illumidesk - become: true - gather_facts: false - roles: - - { role: setup_course, tags: setup_course } - - # play 6 - - name: Reverse proxy files - hosts: illumidesk - become: true - gather_facts: false - roles: - - { role: reverse-proxy, tags: proxy } - -# play 7 - - name: Launch stack - hosts: illumidesk - become: true - gather_facts: false - roles: - - { role: launch, tags: launch } diff --git a/ansible/roles/common/tasks/main.yml b/ansible/roles/common/tasks/main.yml deleted file mode 100644 index 90d7e1a9..00000000 --- a/ansible/roles/common/tasks/main.yml +++ /dev/null @@ -1,127 +0,0 @@ -# Default to python3 -- name: install python for ansible - raw: test -e /usr/bin/python3 || (apt -y update && apt install -y python3-minimal) - changed_when: False -- setup: # noqa 502 - -- name: add universe repository for focal - apt_repository: - repo: deb http://archive.ubuntu.com/ubuntu focal universe - state: present - when: ansible_distribution_release == 'focal' - -- name: install aptitude 
package with apt - apt: - name: aptitude - force_apt_get: yes - -- name: update all packages to the latest version - apt: - upgrade: "yes" - update_cache: "yes" - cache_valid_time: '3600' - -- name: install required system packages # noqa 403 - apt: - name: "{{ item }}" - state: latest - update_cache: yes - loop: [ - 'apt-transport-https', - 'ca-certificates', - 'curl', - 'python3-pip', - 'python3-setuptools', - 'software-properties-common', - 'virtualenv' - ] - -- name: install system dependencies required by efs - apt: - name: "{{ item }}" - state: latest - update_cache: yes - loop: [ - 'nfs-common', - ] - when: aws_efs_enabled|bool - -- name: install docker-compose - get_url: - url: https://github.com/docker/compose/releases/download/1.25.5/docker-compose-Linux-x86_64 - dest: /usr/local/bin/docker-compose - mode: '0755' - -- name: create root directory on host for mounts - file: - path: "{{ item.dest }}" - mode: "{{ item.mode }}" - owner: "{{ item.owner }}" - group: "{{ item.group }}" - state: directory - with_items: - - { dest: "{{ mnt_root }}", mode: '0755', owner: 'root', group: 'root' } - -- name: ensure rpcbind service is running - service: - name: rpcbind - state: started - enabled: yes - when: aws_efs_enabled|bool - -- name: get current availability zone from aws - uri: - url: http://169.254.169.254/latest/meta-data/placement/availability-zone - return_content: yes - register: _aws_current_az - when: aws_efs_enabled|bool - -- name: mount efs volume - mount: - backup: "yes" - name: "{{ mnt_root }}" - src: "{{ _aws_current_az.content }}.{{ efs_id }}.efs.{{ aws_region }}.amazonaws.com:/" - fstype: nfs4 - opts: "nfsvers=4.1" - state: mounted - when: aws_efs_enabled|bool - -- name: create directories within mount/nfs directory - file: - path: "{{ item.dest }}" - mode: "{{ item.mode }}" - owner: "{{ item.owner }}" - group: "{{ item.group }}" - state: directory - with_items: - - { dest: "{{ mnt_root }}/{{ org_name }}", mode: '0755', owner: 'root', group: 'root' 
} - - { dest: "{{ mnt_root }}/{{ org_name }}/exchange", mode: '0777', owner: 'root', group: 'root' } - -- name: add docker gpg apt key - apt_key: - url: https://download.docker.com/linux/ubuntu/gpg - state: present - -- name: add docker repository - apt_repository: - repo: deb https://download.docker.com/linux/ubuntu bionic stable - state: present - -- name: update apt and install docker-ce # noqa 403 - apt: - update_cache: yes - name: docker-ce - state: latest - -- name: install docker python client - pip: - name: docker - -- name: create a docker network - docker_network: - name: 'jupyter-network' - -- name: create a working directory - file: - path: "{{ working_dir }}" - state: directory diff --git a/ansible/roles/illumidesk/tasks/main.yml b/ansible/roles/illumidesk/tasks/main.yml deleted file mode 100644 index b2e172c9..00000000 --- a/ansible/roles/illumidesk/tasks/main.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -- name: synchronize illumidesk directory to working directory - synchronize: - src: ../../../../src - dest: "{{ working_dir }}" - rsync_opts: - - "--no-motd" - - "--exclude=build*" - - "--exclude=*.egg-info" - - "--exclude=*.pyc" - - "--exclude=__pycache__" - -- name: create a zip archive of the illumidesk package - archive: - path: "{{ working_dir }}/src" - dest: "{{ working_dir }}/illumidesk.zip" - format: zip - -- name: create lti 11 configuration file from template - template: - src: lti11-cartridge.xml.j2 - dest: "{{ working_dir }}/lti11-cartridge.xml" - when: authentication_type == "lti11" diff --git a/ansible/roles/illumidesk/templates/lti11-cartridge.xml.j2 b/ansible/roles/illumidesk/templates/lti11-cartridge.xml.j2 deleted file mode 100644 index 39ab009b..00000000 --- a/ansible/roles/illumidesk/templates/lti11-cartridge.xml.j2 +++ /dev/null @@ -1,23 +0,0 @@ - - - IllumiDesk - - https://{{org_name}}.illumidesk.com/hub/lti/launch - - public - {{org_name}}.illumidesk.com - 1000 - 1000 - - true - IllumiDesk - 
https://{{org_name}}.illumidesk.com/hub/lti/launch - public - _blank - - - \ No newline at end of file diff --git a/ansible/roles/jupyterhub/files/jupyterhub_config_lti11.py b/ansible/roles/jupyterhub/files/jupyterhub_config_lti11.py deleted file mode 100644 index b9cf81c4..00000000 --- a/ansible/roles/jupyterhub/files/jupyterhub_config_lti11.py +++ /dev/null @@ -1,73 +0,0 @@ -import os - -from illumidesk.apis.setup_course_service import get_current_service_definitions -from illumidesk.authenticators.authenticator import LTI11Authenticator -from illumidesk.authenticators.authenticator import setup_course_hook -from illumidesk.grades.handlers import SendGradesHandler -from illumidesk.spawners.spawners import IllumiDeskDockerSpawner - - -c = get_config() - -# load the base configuration file (with common settings) -load_subconfig('/etc/jupyterhub/jupyterhub_config_base.py') # noqa: F821 - -########################################## -# BEGIN JUPYTERHUB APPLICATION -########################################## - -# LTI 1.1 authenticator class. 
-c.JupyterHub.authenticator_class = LTI11Authenticator - -# Spawn end-user container and enable extensions by role -c.JupyterHub.spawner_class = IllumiDeskDockerSpawner - -########################################## -# END JUPYTERHUB APPLICATION -########################################## - -########################################## -# BEGIN LTI 1.1 AUTHENTICATOR -########################################## -c.LTIAuthenticator.consumers = { - os.environ.get('LTI_CONSUMER_KEY') - or 'ild_test_consumer_key': os.environ.get('LTI_SHARED_SECRET') - or 'ild_test_shared_secret' -} - -# Custom Handlers -# the first one is used to send grades to LMS -# this url pattern was changed to accept spaces in the assignment name -c.JupyterHub.extra_handlers = [ - (r'/submit-grades/(?P[a-zA-Z0-9-_]+)/(?P.*)$', SendGradesHandler,), -] - -########################################## -# END LTI 1.1 AUTHENTICATOR -########################################## - -########################################## -# BEGIN GENERAL AUTHENTICATION -########################################## - -# Post auth hook to setup course -c.Authenticator.post_auth_hook = setup_course_hook - -########################################## -# END GENERAL AUTHENTICATION -########################################## - -########################################## -# SETUP COURSE SERVICE -########################################## - -# Dynamic config to setup new courses -extra_services = get_current_service_definitions() - -# load k/v's when starting jupyterhub -c.JupyterHub.load_groups.update(extra_services['load_groups']) -c.JupyterHub.services.extend(extra_services['services']) - -########################################## -# END SETUP COURSE SERVICE -########################################## diff --git a/ansible/roles/jupyterhub/files/jupyterhub_config_lti13.py b/ansible/roles/jupyterhub/files/jupyterhub_config_lti13.py deleted file mode 100644 index 45eb9067..00000000 --- 
a/ansible/roles/jupyterhub/files/jupyterhub_config_lti13.py +++ /dev/null @@ -1,77 +0,0 @@ -import os - -from illumidesk.apis.setup_course_service import get_current_service_definitions -from illumidesk.authenticators.authenticator import LTI13Authenticator -from illumidesk.authenticators.authenticator import setup_course_hook -from illumidesk.authenticators.handlers import LTI13LoginHandler - -from illumidesk.grades.handlers import SendGradesHandler - -from illumidesk.lti13.handlers import FileSelectHandler -from illumidesk.lti13.handlers import LTI13ConfigHandler -from illumidesk.lti13.handlers import LTI13JWKSHandler - -from illumidesk.spawners.spawners import IllumiDeskDockerSpawner - -c = get_config() - - -# load the base configuration file (with common settings) -load_subconfig('/etc/jupyterhub/jupyterhub_config_base.py') # noqa: F821 - -########################################## -# BEGIN LTI 1.3 AUTHENTICATOR -########################################## - -# LTI 1.3 authenticator class. 
-c.JupyterHub.authenticator_class = LTI13Authenticator - -# Spawn containers with by role -c.JupyterHub.spawner_class = IllumiDeskDockerSpawner - -# created after installing app in lms -c.LTI13Authenticator.client_id = os.environ.get('LTI13_CLIENT_ID') -c.LTI13Authenticator.endpoint = os.environ.get('LTI13_ENDPOINT') -c.LTI13Authenticator.token_url = os.environ.get('LTI13_TOKEN_URL') -c.LTI13Authenticator.authorize_url = os.environ.get('LTI13_AUTHORIZE_URL') - -# Custom Handlers used for LTI endpoints -# the first one is used to send grades to LMS -# this url pattern was changed to accept spaces in the assignment name -c.JupyterHub.extra_handlers = [ - (r'/submit-grades/(?P[a-zA-Z0-9-_]+)/(?P.*)$', SendGradesHandler), - (r'/lti/launch$', LTI13LoginHandler), - (r'/lti13/config$', LTI13ConfigHandler), - (r'/lti13/jwks$', LTI13JWKSHandler), - (r'/lti13/file-selection$', FileSelectHandler), -] - -########################################## -# END LTI 1.3 AUTHENTICATOR -########################################## - -########################################## -# BEGIN GENERAL AUTHENTICATION (OVERRIDE) -########################################## - -# Post auth hook to setup course -c.Authenticator.post_auth_hook = setup_course_hook - -########################################## -# END GENERAL AUTHENTICATION -########################################## - -########################################## -# SETUP COURSE SERVICE -########################################## - -# Dynamic config to setup new courses -extra_services = get_current_service_definitions() - -# load k/v's when starting jupyterhub -c.JupyterHub.load_groups.update(extra_services['load_groups']) -c.JupyterHub.services.extend(extra_services['services']) - -########################################## -# END SETUP COURSE SERVICE -########################################## diff --git a/ansible/roles/jupyterhub/files/requirements.txt b/ansible/roles/jupyterhub/files/requirements.txt deleted file mode 100644 index 
0f17edfe..00000000 --- a/ansible/roles/jupyterhub/files/requirements.txt +++ /dev/null @@ -1,29 +0,0 @@ -# jupyterhub -jupyterhub==1.1.0 - -# postgres -psycopg2-binary==2.8.6 - -# traefik (reverse proxy) -jupyterhub-traefik-proxy==0.1.6 - -# Authenticators -jupyterhub-ltiauthenticator==0.4.0 -oauthenticator==0.11.0 - -# Spawners -dockerspawner==0.11.1 -git+git://github.com/jupyterhub/wrapspawner.git@94b779af3926a90be922356bb9ab18153b918733 - -# Utils -filelock==3.0.12 -josepy==1.3.0 -nbgitpuller==0.9.0 -pem==20.1.0 -pycryptodome==3.9.7 -pyjwkest==1.4.2 -PyJWT==1.7.1 -pylti==0.7.0 - -# Cull idle servers -jupyterhub-idle-culler==1.0 diff --git a/ansible/roles/jupyterhub/files/share/static/images/favicon.ico b/ansible/roles/jupyterhub/files/share/static/images/favicon.ico deleted file mode 100644 index f468d25d..00000000 Binary files a/ansible/roles/jupyterhub/files/share/static/images/favicon.ico and /dev/null differ diff --git a/ansible/roles/jupyterhub/files/share/static/images/illumidesk-80.png b/ansible/roles/jupyterhub/files/share/static/images/illumidesk-80.png deleted file mode 100644 index 3a5d3182..00000000 Binary files a/ansible/roles/jupyterhub/files/share/static/images/illumidesk-80.png and /dev/null differ diff --git a/ansible/roles/jupyterhub/files/share/templates/404.html b/ansible/roles/jupyterhub/files/share/templates/404.html deleted file mode 100644 index 81332856..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/404.html +++ /dev/null @@ -1,5 +0,0 @@ -{% extends "error.html" %} - -{% block error_detail %} -

Fooey! Page not found ...

-{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/405.html b/ansible/roles/jupyterhub/files/share/templates/405.html deleted file mode 100644 index c68ace53..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/405.html +++ /dev/null @@ -1,5 +0,0 @@ -{% extends "error.html" %} - -{% block error_detail %} -

Please sign in from your Learning Management System (LMS) ...

-{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/500.html b/ansible/roles/jupyterhub/files/share/templates/500.html deleted file mode 100644 index 56e56b70..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/500.html +++ /dev/null @@ -1,7 +0,0 @@ -{% extends "error.html" %} - -{% block error_detail %} -

It's us, not you! We have encountered an internal error on our end.

-

-

We have logged this error and should have a remedy soon.

-{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/admin.html b/ansible/roles/jupyterhub/files/share/templates/admin.html deleted file mode 100644 index a0c6fb87..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/admin.html +++ /dev/null @@ -1,171 +0,0 @@ -{% extends "page.html" %} - -{% macro th(label, key='', colspan=1) %} -{{label}} - {% if key %} - - - {% endif %} - -{% endmacro %} - -{% block main %} - -
- - - - {% block thead %} - {{ th("User (%i)" % users|length, 'name') }} - {{ th("Admin", 'admin') }} - {{ th("Last Activity", 'last_activity') }} - {{ th("Running (%i)" % running|length, 'running', colspan=2) }} - {% endblock thead %} - - - - - - - {% for user in users %} - {% for spawner in user.all_spawners() %} - - {% block user_row scoped %} - - - - - - - - - - - - - {% endblock user_row %} - {% endfor %} - {% endfor %} - -
- Add Users - - Start All - Stop All - - Shutdown Hub -
{{user.name}} - {%- if spawner.name -%} - /{{ spawner.name }} - {%- endif -%} - - {%- if spawner.name == '' -%} - {% if user.admin %}admin{% endif %} - {%- endif -%} - - {%- if spawner.last_activity -%} - {{ spawner.last_activity.isoformat() + 'Z' }} - {%- else -%} - Never - {%- endif -%} - - - stop server - - - start server - - - {%- if admin_access %} - - access server - - {%- endif %} - - {%- if spawner.name == '' -%} - edit user - {%- endif -%} - - {%- if spawner.name == '' -%} - {#- user row -#} - {%- if user.name != current_user.name -%} - delete user - {%- endif -%} - {%- else -%} - {#- named spawner row -#} - delete server - {%- endif -%} -
-
- - -{% call modal('Delete User', btn_class='btn-danger delete-button') %} - Are you sure you want to delete user USER? - This operation cannot be undone. -{% endcall %} - -{% call modal('Stop All Servers', btn_label='Stop All', btn_class='btn-danger stop-all-button') %} - Are you sure you want to stop all your users' servers? Kernels will be shutdown and unsaved data may be lost. -{% endcall %} - -{% call modal('Start All Servers', btn_label='Start All', btn_class='btn-primary start-all-button') %} - Are you sure you want to start all servers? This can slam your server resources. -{% endcall %} - -{% call modal('Shutdown Hub', btn_label='Shutdown', btn_class='btn-danger shutdown-button') %} - Are you sure you want to shutdown the Hub? - You can choose to leave the proxy and/or single-user servers running by unchecking the boxes below: -
- -
-
- -
-{% endcall %} - -{% macro user_modal(name, multi=False) %} -{% call modal(name, btn_class='btn-primary save-button') %} -
- <{%- if multi -%} - textarea - {%- else -%} - input type="text" - {%- endif %} - class="form-control username-input" - placeholder="{%- if multi -%} usernames separated by lines{%- else -%} username {%-endif-%}"> - {%- if multi -%}{%- endif -%} -
-
- -
-{% endcall %} -{% endmacro %} - -{{ user_modal('Edit User') }} - -{{ user_modal('Add Users', multi=True) }} - -{% endblock %} - -{% block script %} -{{ super() }} - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/error.html b/ansible/roles/jupyterhub/files/share/templates/error.html deleted file mode 100644 index 8a151e6e..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/error.html +++ /dev/null @@ -1,64 +0,0 @@ -{% extends "page.html" %} - -{% block login_widget %} -{% endblock %} - -{% block main %} - -
- {% block h1_error %} -

- {{status_code}} : {{status_message}} -

- {% endblock h1_error %} - {% block error_detail %} - {% if message %} -

- {{message}} -

- {% endif %} - {% if message_html %} -

- {{message_html | safe}} -

- {% endif %} - {% if extra_error_html %} -

- {{extra_error_html | safe}} -

- {% endif %} - {% endblock error_detail %} -
- -{% endblock %} - -{% block script %} - {{super()}} - - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/file_select.html b/ansible/roles/jupyterhub/files/share/templates/file_select.html deleted file mode 100644 index ff275242..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/file_select.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - Select file - - - - - -
-
- Please select a file: -
-
-
-
- -
    - {% for file in files %} -
  • - -
  • - {% endfor %} -
- -
-
- - \ No newline at end of file diff --git a/ansible/roles/jupyterhub/files/share/templates/home.html b/ansible/roles/jupyterhub/files/share/templates/home.html deleted file mode 100644 index 88379e56..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/home.html +++ /dev/null @@ -1,93 +0,0 @@ -{% extends "page.html" %} -{% if announcement_home %} - {% set announcement = announcement_home %} -{% endif %} - -{% block main %} -
- - {% if allow_named_servers %} -

- Named Servers -

- -

- In addition to your default server, - you may have additional {% if named_server_limit_per_user > 0 %}{{ named_server_limit_per_user }} {% endif %}server(s) with names. - This allows you to have more than one server running at the same time. -

- - {% set named_spawners = user.all_spawners(include_default=False)|list %} - - - - - - - - - - - - - - - {% for spawner in named_spawners %} - - {# name #} - - {# url #} - - {# activity #} - - {# actions #} - - - {% endfor %} - -
Server nameURLLast activityActions
- - - Add New Server - -
{{ spawner.name }} - - {{ user.server_url(spawner.name) }} - - - {% if spawner.last_activity %} - {{ spawner.last_activity.isoformat() + 'Z' }} - {% else %} - Never - {% endif %} - - stop - - start - - delete -
- {% endif %} -
-{% endblock main %} - -{% block script %} -{{ super() }} - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/login.html b/ansible/roles/jupyterhub/files/share/templates/login.html deleted file mode 100644 index 3f0f94e4..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/login.html +++ /dev/null @@ -1,84 +0,0 @@ -{% extends "page.html" %} -{% if announcement_login %} - {% set announcement = announcement_login %} -{% endif %} - -{% block login_widget %} -{% endblock %} - -{% block main %} - -{% block login %} -
-{% if custom_html %} -{{ custom_html | safe }} -{% elif login_service %} - -{% else %} -
-
- Sign in -
-
- - - - {% if login_error %} - - {% endif %} - - - - - - -
-
-{% endif %} -
-{% endblock login %} - -{% endblock %} - -{% block script %} -{{ super() }} - - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/logout.html b/ansible/roles/jupyterhub/files/share/templates/logout.html deleted file mode 100644 index 76fe0f8d..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/logout.html +++ /dev/null @@ -1,14 +0,0 @@ -{% extends "page.html" %} -{% if announcement_logout %} - {% set announcement = announcement_logout %} -{% endif %} - -{% block main %} - -
-

- Successfully logged out. -

-
- -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/not_running.html b/ansible/roles/jupyterhub/files/share/templates/not_running.html deleted file mode 100644 index 182e7ba0..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/not_running.html +++ /dev/null @@ -1,44 +0,0 @@ -{% extends "page.html" %} - -{% block main %} - -
-
-
- {% block heading %} -

- {% if failed %} - Spawn failed - {% else %} - Server not running - {% endif %} -

- {% endblock %} - {% block message %} -

- {% if failed %} - The latest attempt to start your server {{ server_name }} has failed. - {% if failed_message %} - {{ failed_message }} - {% endif %} - Would you like to retry starting it? - {% else %} - Your server {{ server_name }} is not running. Would you like to start it? - {% endif %} -

- {% endblock %} - {% block start_button %} - - {% if failed %} - Relaunch - {% else %} - Launch - {% endif %} - Server {{ server_name }} - - {% endblock %} -
-
-
- -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/oauth.html b/ansible/roles/jupyterhub/files/share/templates/oauth.html deleted file mode 100644 index 2e57a69b..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/oauth.html +++ /dev/null @@ -1,51 +0,0 @@ -{% extends "page.html" %} - -{% block login_widget %} -{% endblock %} - -{% block main %} -
-

Authorize access

- -

- A service is attempting to authorize with your - IllumiDesk account -

- -

- {{ oauth_client.description }} (oauth URL: {{ oauth_client.redirect_uri }}) - would like permission to identify you. - {% if scopes == ["identify"] %} - It will not be able to take actions on your behalf. - {% endif %} -

- -

The application will be able to:

-
-
- {% for scope in scopes %} -
- -
- {% endfor %} - -
-
-
- - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/page.html b/ansible/roles/jupyterhub/files/share/templates/page.html deleted file mode 100644 index 732ecd4b..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/page.html +++ /dev/null @@ -1,211 +0,0 @@ -{% macro modal(title, btn_label=None, btn_class="btn-primary") %} -{% set key = title.replace(' ', '-').lower() %} -{% set btn_label = btn_label or title %} - -{% endmacro %} - - - - - - - - - {% block title %}IllumiDesk{% endblock %} - - - - {% block stylesheet %} - - {% endblock %} - {% block scripts %} - - - - {% endblock %} - - - - - - {% block meta %} - {% endblock %} - - - - - - - -{% block nav_bar %} - -{% endblock %} - - -{% block announcement %} -{% if announcement %} -
- {{ announcement | safe }} -
-{% endif %} -{% endblock %} -{% if services %} - {% for service in services if service.name == 'announcement' %} -
-
-
- - -
- {% endfor %} -{% endif%} - - - - -{% block main %} -{% endblock %} - -{% call modal('Error', btn_label='OK') %} -
- The error -
-{% endcall %} - -{% block script %} -{% endblock %} - - - - - diff --git a/ansible/roles/jupyterhub/files/share/templates/spawn.html b/ansible/roles/jupyterhub/files/share/templates/spawn.html deleted file mode 100644 index f1330b1f..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/spawn.html +++ /dev/null @@ -1,31 +0,0 @@ -{% extends "page.html" %} -{% if announcement_spawn %} - {% set announcement = announcement_spawn %} -{% endif %} - -{% block main %} - -
- {% block heading %} -
-

Server Options

-
- {% endblock %} -
- {% if for_user and user.name != for_user.name -%} -

Spawning server for {{ for_user.name }}

- {% endif -%} - {% if error_message -%} -

- Error: {{error_message}} -

- {% endif %} -
- {{spawner_options_form | safe}} -
- -
-
-
- -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/spawn_pending.html b/ansible/roles/jupyterhub/files/share/templates/spawn_pending.html deleted file mode 100644 index b1aedb3a..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/spawn_pending.html +++ /dev/null @@ -1,92 +0,0 @@ -{% extends "page.html" %} - -{% block main %} - -
-
-
- {% block message %} -

Your server is starting up.

-

You will be redirected automatically when it's ready for you.

- {% endblock %} -
-
- 0% Complete -
-
-

-
-
-
-
-
- Event log -
-
-
-
-
- -{% endblock %} - -{% block script %} -{{ super() }} - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/stop_pending.html b/ansible/roles/jupyterhub/files/share/templates/stop_pending.html deleted file mode 100644 index 04a8de1d..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/stop_pending.html +++ /dev/null @@ -1,32 +0,0 @@ -{% extends "page.html" %} - -{% block main %} - -
-
-
- {% block message %} -

Your server is stopping.

-

You will be able to start it again once it has finished stopping.

- {% endblock message %} -

- refresh -
-
-
- -{% endblock %} - -{% block script %} -{{ super() }} - -{% endblock %} diff --git a/ansible/roles/jupyterhub/files/share/templates/token.html b/ansible/roles/jupyterhub/files/share/templates/token.html deleted file mode 100644 index a493c8e8..00000000 --- a/ansible/roles/jupyterhub/files/share/templates/token.html +++ /dev/null @@ -1,146 +0,0 @@ -{% extends "page.html" %} - -{% block main %} - -
-
-
-
- -
-
- - - - This note will help you keep track of what your tokens are for. - -
-
-
- -
- -
- - {% if api_tokens %} -
-

API Tokens

-

- These are tokens with full access to the JupyterHub API. - Anything you can do with JupyterHub can be done with these tokens. - Revoking the API token for a running server will require restarting that server. -

- - - - - - - - - - {% for token in api_tokens %} - - {% block token_row scoped %} - - - - - {% endblock token_row %} - - {% endfor %} - -
NoteLast usedCreated
{{token.note}} - {%- if token.last_activity -%} - {{ token.last_activity.isoformat() + 'Z' }} - {%- else -%} - Never - {%- endif -%} - - {%- if token.created -%} - {{ token.created.isoformat() + 'Z' }} - {%- else -%} - N/A - {%- endif -%} - - -
-
- {% endif %} - - {% if oauth_clients %} -
-

Authorized Applications

-

- These are applications that use OAuth with JupyterHub - to identify users (mostly notebook servers). - - OAuth tokens can generally only be used to identify you, - not take actions on your behalf. -

- - - - - - - - - - {% for client in oauth_clients %} - - {% block client_row scoped %} - - - - - {% endfor %} - -
ApplicationLast usedFirst authorized
{{ client['description'] }} - {%- if client['last_activity'] -%} - {{ client['last_activity'].isoformat() + 'Z' }} - {%- else -%} - Never - {%- endif -%} - - {%- if client['created'] -%} - {{ client['created'].isoformat() + 'Z' }} - {%- else -%} - N/A - {%- endif -%} - - - {% endblock client_row %} -
-
- {% endif %} -
-{% endblock main %} - -{% block script %} -{{ super() }} - -{% endblock script %} diff --git a/ansible/roles/jupyterhub/files/wait-for-postgres.sh b/ansible/roles/jupyterhub/files/wait-for-postgres.sh deleted file mode 100644 index 0b8b6896..00000000 --- a/ansible/roles/jupyterhub/files/wait-for-postgres.sh +++ /dev/null @@ -1,16 +0,0 @@ -# https://docs.docker.com/compose/startup-order/ -#!/bin/sh - -set -e - -host="$1" -shift -cmd="$@" - -until PGPASSWORD=$POSTGRES_PASSWORD psql -h "$host" -U "postgres" -c '\q'; do - >&2 echo "Postgres is unavailable - sleeping" - sleep 1 -done - ->&2 echo "Postgres is up - executing command" -exec $cmd \ No newline at end of file diff --git a/ansible/roles/jupyterhub/tasks/main.yml b/ansible/roles/jupyterhub/tasks/main.yml deleted file mode 100644 index b14dac37..00000000 --- a/ansible/roles/jupyterhub/tasks/main.yml +++ /dev/null @@ -1,146 +0,0 @@ ---- -- name: create external postgres data volume - docker_volume: - name: db-data - -- name: create external postgres data volume for notebooks labs - docker_volume: - name: db-labs-data - -- name: create external postgres data volume for nbgrader - docker_volume: - name: db-nbgrader-data - -- name: create external jupyterhub and setup-course data volume - docker_volume: - name: jupyterhub-data - -- name: generate random key for jupyterhub crypt key # noqa 301 305 - shell: /usr/bin/openssl rand -hex 32 - register: _jhub_crypt_key_result - -- name: assign jupyterhub cryptographic key from stdout - set_fact: - jhub_crypt_key={{ _jhub_crypt_key_result.stdout }} - -- name: generate random key for jupyterhub api token # noqa 301 305 - shell: /usr/bin/openssl rand -hex 32 - register: _jhub_api_token_result - -- name: assign jhub api token from stdout - set_fact: jhub_api_token={{ _jhub_api_token_result.stdout }} - -- name: generate random key for proxy # noqa 301 305 - shell: /usr/bin/openssl rand -hex 32 - register: _config_auth_proxy_token_result - -- name: assign config auth proxy 
token from stdout - set_fact: - config_auth_proxy_token={{ _config_auth_proxy_token_result.stdout }} - -- name: generate random lti11 client id (consumer key) # noqa 301 305 - shell: /usr/bin/openssl rand -hex 16 - register: _lti11_consumer_key_result - when: authentication_type == "lti11" - -- name: assign lti11 client id (consumer key) from stdout - set_fact: - lti11_consumer_key_result={{ _lti11_consumer_key_result.stdout }} - when: authentication_type == "lti11" - -- name: generate random lti11 shared secret # noqa 301 305 - shell: /usr/bin/openssl rand -hex 16 - register: _lti11_shared_secret_result - when: authentication_type == "lti11" - -- name: assign lti11 shared secret from stdout - set_fact: - lti11_shared_secret_result={{ _lti11_shared_secret_result.stdout }} - when: authentication_type == "lti11" - -- name: copy requirements.txt for base jupyterhub image - copy: - src: requirements.txt - dest: "{{ working_dir }}/jupyterhub-requirements.txt" - -- name: Authentication type to install - debug: - msg: "authentication_type?: {{ authentication_type }}" - -- name: copy the custom jupyterhub configuration file with lti v1.1 settings - copy: - src: jupyterhub_config_lti11.py - dest: "{{ working_dir }}/jupyterhub_config.py" - when: authentication_type == "lti11" - -- name: copy the custom jupyterhub configuration file with lti v1.3 settings - copy: - src: jupyterhub_config_lti13.py - dest: "{{ working_dir }}/jupyterhub_config.py" - when: authentication_type == "lti13" - -- name: copy postgres utility scripts - copy: - src: wait-for-postgres.sh - dest: "{{ working_dir }}/wait-for-postgres.sh" - -- name: copy favicon - copy: - src: share/static/images/favicon.ico - dest: "{{ working_dir }}/favicon.ico" - -- name: copy logo - copy: - src: share/static/images/illumidesk-80.png - dest: "{{ working_dir }}/illumidesk-80.png" - -- name: copy jupyterhub template files - copy: - src: share/templates - dest: "{{ working_dir }}/share" - -- name: ensures 
{{working_dir}}/keys directory exists - file: - path: "{{working_dir}}/keys" - state: directory - -- name: create lti v1.3 private key in pem format - openssl_privatekey: - path: "{{working_dir}}/keys/rsa_private.pem" - mode: '0644' - -- name: create the common env var file from template - template: - src: env.common.j2 - dest: "{{ working_dir }}/env.common" - -- name: create the jupyterhub env var file from template - template: - src: env.jhub.j2 - dest: "{{ working_dir }}/env.jhub" - -- name: create the base of jupyterhub configuration file with common settings - template: - src: jupyterhub_config_base.py.j2 - dest: "{{ working_dir }}/jupyterhub_config_base.py" - -- name: create base jupyterhub dockerfile from template - template: - src: Dockerfile.jhub.j2 - dest: "{{ working_dir }}/Dockerfile.jhub" - -- name: build jupyterhub image - docker_image: - name: "{{ docker_illumidesk_jhub_image }}" - build: - path: "{{ working_dir }}" - dockerfile: "{{ docker_jupyterhub_dockerfile }}" - pull: no - state: present - source: build - force_source: yes - -- name: create the docker-compose yaml file from template - template: - src: docker-compose.yml.j2 - dest: "{{ working_dir }}/docker-compose.yml" diff --git a/ansible/roles/jupyterhub/templates/Dockerfile.jhub.j2 b/ansible/roles/jupyterhub/templates/Dockerfile.jhub.j2 deleted file mode 100644 index af44d88c..00000000 --- a/ansible/roles/jupyterhub/templates/Dockerfile.jhub.j2 +++ /dev/null @@ -1,61 +0,0 @@ -ARG BASE_IMAGE={{docker_jhub_base_image}} -FROM ${BASE_IMAGE} - -# add additional packages for postgres -RUN apt-get update \ - && apt-get install -yq --no-install-recommends \ - apt-utils \ - fonts-liberation \ - git \ - libpq-dev \ - nano \ - postgresql-client \ - software-properties-common \ - sudo \ - wget \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# always make sure pip is up to date! 
-RUN python3 -m pip install --no-cache --upgrade setuptools pip - -# Enable prompt color in the skeleton .bashrc before creating the default JH_USER -# TODO: add steps to add JH_USER and run the jhub with this user -RUN sed -i 's/^#force_color_prompt=yes/force_color_prompt=yes/' /etc/skel/.bashrc - -WORKDIR /tmp - -# Install packages from requirements file -COPY jupyterhub-requirements.txt /tmp/jupyterhub/requirements.txt -RUN python3 -m pip install --no-cache -r /tmp/jupyterhub/requirements.txt - -# Install illumidesk package -COPY illumidesk.zip /tmp/illumidesk.zip -RUN python3 -m pip install --no-cache /tmp/illumidesk.zip - -# Copy logo and favicon -COPY illumidesk-80.png /usr/local/share/jupyterhub/static/images/illumidesk-80.png -COPY favicon.ico /usr/local/share/jupyterhub/static/favicon.ico - -# Copy templates -COPY share/templates/. /usr/local/share/jupyterhub/templates/. - -# Copy the base configuration file and the custom file containing special settings -# /etc/jupyterhub/jupyterhub_config.py by default -COPY jupyterhub_config.py /etc/jupyterhub/jupyterhub_config.py -COPY jupyterhub_config_base.py /etc/jupyterhub/jupyterhub_config_base.py - -WORKDIR /srv/jupyterhub - -# Get announcement from jupyterhub examples folder -RUN wget https://raw.githubusercontent.com/jupyterhub/jupyterhub/d126baa443ad7d893be2ff4a70afe9ef5b8a4a1a/examples/service-announcement/announcement.py - -# Copy postgres util script and update permissions -COPY wait-for-postgres.sh /srv/jupyterhub/wait-for-postgres.sh -RUN chmod +x /srv/jupyterhub/wait-for-postgres.sh - -# Run standard command but wait for postgres -# https://docs.docker.com/compose/startup-order/ -CMD ["/srv/jupyterhub/wait-for-postgres.sh", "python3", "jupyterhub", "-f", "/etc/jupyterhub/jupyterhub_config.py"] - -HEALTHCHECK CMD curl --fail http://localhost:8081/ || exit 1 diff --git a/ansible/roles/jupyterhub/templates/docker-compose.yml.j2 b/ansible/roles/jupyterhub/templates/docker-compose.yml.j2 deleted file mode 
100644 index 0ee711df..00000000 --- a/ansible/roles/jupyterhub/templates/docker-compose.yml.j2 +++ /dev/null @@ -1,99 +0,0 @@ -version: "3.5" - -services: - jupyterhub: - restart: on-failure - image: {{docker_illumidesk_jhub_image}} - depends_on: - - reverse-proxy - - jupyterhub-db - - setup-course - volumes: - - /var/run/docker.sock:/var/run/docker.sock:rw - - {{mnt_root}}/{{org_name}}/home:/home - - {{mnt_root}}/{{org_name}}/shared:/shared - - data:/data - - ./reverse-proxy:/etc/traefik - - ./keys:/secrets/keys - env_file: - - env.common - - env.jhub - command: > - jupyterhub -f /etc/jupyterhub/jupyterhub_config.py --debug - labels: - - "traefik.enable=true" - - "traefik.http.routers.jhub_router.service=jupyterhub" - jupyterhub-db: - image: {{docker_postgres_image}} - container_name: jupyterhub-db - restart: always - environment: - - POSTGRES_USER={{postgres_jupyterhub_user}} - - POSTGRES_PASSWORD={{postgres_jupyterhub_password}} - volumes: - - db:{{pg_data}} - reverse-proxy: - image: traefik:v1.7-alpine - container_name: reverse-proxy - ports: - - "8000:8000" - - "443:443" - - "8080:8080" - volumes: - - ./reverse-proxy/:/etc/traefik/ - - /var/run/docker.sock:/var/run/docker.sock - restart: on-failure - setup-course: - restart: on-failure - image: {{docker_setup_course_image}} - container_name: setup-course - command: hypercorn --bind 0.0.0.0:8000 --workers 1 illumidesk.setup_course.app:app - env_file: - - env.common - - env.setup_course - volumes: - - /var/run/docker.sock:/var/run/docker.sock:rw - - {{mnt_root}}:{{mnt_root}} - - data:/srv/jupyterhub - - {{ working_dir }}:{{ working_dir }} -{% if 'rds.amazonaws.com' not in postgres_nbgrader_host%} - {{postgres_nbgrader_host}}: - image: {{docker_postgres_image}} - container_name: {{postgres_nbgrader_host}} - restart: always - environment: - - POSTGRES_USER={{postgres_nbgrader_user}} - - POSTGRES_PASSWORD={{postgres_nbgrader_password}} - volumes: - - db-nbgrader:{{pg_data}} -{% endif %} -{% if postgres_labs_enabled 
is sameas true %} - {{postgres_labs_host}}: - image: {{docker_postgres_image}} - container_name: {{postgres_labs_host}} - restart: always - environment: - - POSTGRES_USER={{postgres_labs_user}} - - POSTGRES_PASSWORD={{postgres_labs_password}} - volumes: - - db-labs:{{pg_data}} -{% endif %} - -volumes: - data: - external: - name: jupyterhub-data - db: - external: - name: db-data - db-labs: - external: - name: db-labs-data - db-nbgrader: - external: - name: db-nbgrader-data - -networks: - default: - external: - name: jupyter-network diff --git a/ansible/roles/jupyterhub/templates/env.common.j2 b/ansible/roles/jupyterhub/templates/env.common.j2 deleted file mode 100644 index 7439620a..00000000 --- a/ansible/roles/jupyterhub/templates/env.common.j2 +++ /dev/null @@ -1,38 +0,0 @@ -# docker network name -DOCKER_NETWORK_NAME=jupyter-network - -# deployment working directory -ILLUMIDESK_DIR={{working_dir}} - -# organization name used with nfs -ORGANIZATION_NAME={{org_name}} - -# api token should be equal to value set for jupyterhub -JUPYTERHUB_API_TOKEN={{jhub_api_token}} - -# base url for jupyterhub application -JUPYTERHUB_BASE_URL={{base_url}} - -# jupyterhub api url using proxy name and the proxy's external port -JUPYTERHUB_API_URL=http://reverse-proxy:8000{{base_url}}/hub/api - -# local path to save the dynamic config -JUPYTERHUB_CONFIG_PATH=/srv/jupyterhub -JUPYTERHUB_SERVICE_NAME=jupyterhub - -# grader uid/gid to restrict access -MNT_ROOT={{mnt_root}} -NB_GID=100 - -# enable/disable shared folder between students and graders -SHARED_FOLDER_ENABLED={{shared_folder_enabled}} - -# Postgres data directory -PGDATA={{pg_data}} - -# Postgres settings for NBgrader -POSTGRES_NBGRADER_DB={{postgres_nbgrader_dbname}} -POSTGRES_NBGRADER_USER={{postgres_nbgrader_user}} -POSTGRES_NBGRADER_PASSWORD={{postgres_nbgrader_password}} -POSTGRES_NBGRADER_HOST={{postgres_nbgrader_host}} -POSTGRES_NBGRADER_PORT={{postgres_nbgrader_port}} diff --git 
a/ansible/roles/jupyterhub/templates/env.jhub.j2 b/ansible/roles/jupyterhub/templates/env.jhub.j2 deleted file mode 100644 index 7a1d3c2f..00000000 --- a/ansible/roles/jupyterhub/templates/env.jhub.j2 +++ /dev/null @@ -1,52 +0,0 @@ -# images used for roles -DOCKER_END_USER_IMAGE={{docker_illumidesk_end_user_image}} - -# user home directory within container -DOCKER_NOTEBOOK_DIR=/home/jovyan - -# ngrader exchange directory -EXCHANGE_DIR=/srv/nbgrader/exchange - -# jupyterhub cryptographic key (mostly for cookies) -JUPYTERHUB_CRYPT_KEY={{jhub_crypt_key}} - -# shutdown server on logout -JUPYTERHUB_SHUTDOWN_ON_LOGOUT={{shutdown_on_logout}} - -# admin users -JUPYTERHUB_ADMIN_USER={{admin_user}} - -# Postgres settings for JupyterHub -POSTGRES_JUPYTERHUB_DB={{postgres_jupyterhub_dbname}} -POSTGRES_JUPYTERHUB_USER={{postgres_jupyterhub_user}} -POSTGRES_JUPYTERHUB_PASSWORD={{postgres_jupyterhub_password}} -POSTGRES_JUPYTERHUB_HOST={{postgres_jupyterhub_host}} -POSTGRES_JUPYTERHUB_PORT={{postgres_jupyterhub_port}} - -# Mount directory root and permissions set with spawner -NB_NON_GRADER_UID=1000 - -# lti 1.1 -LTI_CONSUMER_KEY={{lti11_consumer_key}} -LTI_SHARED_SECRET={{lti11_shared_secret}} - -# lti 1.3 -LTI13_AUTHORIZE_URL={{lti13_authorize_url}} -LTI13_CLIENT_ID={{lti13_client_id}} -LTI13_ENDPOINT={{lti13_endpoint}} -LTI13_PRIVATE_KEY={{lti13_private_key}} -LTI13_TOKEN_URL={{lti13_token_url}} - -# setup course service name -DOCKER_SETUP_COURSE_SERVICE_NAME=setup-course -DOCKER_SETUP_COURSE_PORT=8000 - -# traefik-proxy -PROXY_API_URL=http://reverse-proxy:8099 - -# anouncement service (internal) -ANNOUNCEMENT_SERVICE_PORT=8889 - -# spawner -SPAWNER_MEM_LIMIT={{spawner_mem_limit}} -SPAWNER_CPU_LIMIT={{spawner_cpu_limit}} diff --git a/ansible/roles/jupyterhub/templates/jupyterhub_config_base.py.j2 b/ansible/roles/jupyterhub/templates/jupyterhub_config_base.py.j2 deleted file mode 100644 index 1fee1548..00000000 --- 
a/ansible/roles/jupyterhub/templates/jupyterhub_config_base.py.j2 +++ /dev/null @@ -1,234 +0,0 @@ -import os -import sys - -from distutils.util import strtobool - -from dockerspawner import DockerSpawner # noqa: F401 - -from illumidesk.apis.announcement_service import ANNOUNCEMENT_JHUB_SERVICE_DEFINITION -from illumidesk.spawners.hooks import custom_auth_state_hook -from illumidesk.spawners.hooks import custom_pre_spawn_hook - -c = get_config() - -########################################## -# BEGIN JUPYTERHUB APPLICATION -########################################## - -{% if pilot_environment is sameas true %} -# Announcement page for Pilot environments -c.JupyterHub.template_vars = {'announcement': '{{ pilot_announcement_text }}'} - -# Limit the number of active end user servers -c.JupyterHub.active_server_limit = 20 - -# Limit the number of concurrent servers that are spawned -c.JupyterHub.concurrent_spawn_limit = 5 - -# redirect users to their servers by default -c.JupyterHub.redirect_to_server = False -{% else %} - -# redirect users to the home page by default -c.JupyterHub.redirect_to_server = True -{% endif %} - -# Set to debug for teting -c.JupyterHub.log_level = 'DEBUG' - -# Allows multiple single-server per user -c.JupyterHub.allow_named_servers = False - -# Load data files -c.JupyterHub.data_files_path = '/usr/local/share/jupyterhub/' - -# Use custom logo -c.JupyterHub.logo_file = os.path.join('/usr/local/share/jupyterhub/', 'static', 'images', 'illumidesk-80.png') - -# Template files -c.JupyterHub.template_paths = ('/usr/local/share/jupyterhub/templates',) - -# Allow the hub to listen on any ip address -c.JupyterHub.hub_ip = '0.0.0.0' - -# This is usually the hub container's name -c.JupyterHub.hub_connect_ip = 'jupyterhub' - -# Provide iframe support -c.JupyterHub.tornado_settings = { - "headers": {"Content-Security-Policy": "frame-ancestors 'self' *"}, - "cookie_options": {"SameSite": "None", "Secure": True}, -} - -# Load data files 
-c.JupyterHub.data_files_path = '/usr/local/share/jupyterhub/' - -# Persist hub cookie secret on volume mounted inside container -data_dir = '/data' -c.JupyterHub.cookie_secret_file = os.path.join(data_dir, 'jupyterhub_cookie_secret') - -# Allow admin access to end-user notebooks -c.JupyterHub.admin_access = True - -# Refrain from cleaning up servers when restarting the hub -c.JupyterHub.cleanup_servers = False - -# Define some static services that jupyterhub will manage -# Although the cull-idle service is internal, and therefore does not need an explicit -# registration of the jupyterhub api token, we add it here so the internal api client -# can use the token to utilize RESTful endpoints with full CRUD priviledges. -c.JupyterHub.services = [ - { - 'name': 'idle-culler', - 'admin': True, - 'command': [sys.executable, '-m', 'jupyterhub_idle_culler', '--timeout=3600'], - 'api_token': os.environ.get('JUPYTERHUB_API_TOKEN'), - }, - ANNOUNCEMENT_JHUB_SERVICE_DEFINITION, -] - -# JupyterHub postgres settings -c.JupyterHub.db_url = 'postgresql://{user}:{password}@{host}:{port}/{db}'.format( - user=os.environ.get('POSTGRES_JUPYTERHUB_USER'), - password=os.environ.get('POSTGRES_JUPYTERHUB_PASSWORD'), - host=os.environ.get('POSTGRES_JUPYTERHUB_HOST'), - port=os.environ.get('POSTGRES_JUPYTERHUB_PORT'), - db=os.environ.get('POSTGRES_JUPYTERHUB_DB'), -) - -# Do not redirect user to his/her server (if running) -c.JupyterHub.redirect_to_server = False - -# JupyterHub's base url -base_url = os.environ.get('JUPYTERHUB_BASE_URL') or '' -c.JupyterHub.base_url = base_url - -# Shutdown servers on logout -# convert string to boolean if the value comes from the env var -shutdown_on_logout = os.environ.get('JUPYTERHUB_SHUTDOWN_ON_LOGOUT') or 'True' -c.JupyterHub.shutdown_on_logout = bool(strtobool(shutdown_on_logout)) - -########################################## -# END JUPYTERHUB APPLICATION -########################################## - -########################################## -# 
BEGIN REVERSE PROXY -########################################## -# Use an external service to manage the proxy -from jupyterhub_traefik_proxy import TraefikTomlProxy - -# configure JupyterHub to use TraefikTomlProxy -c.JupyterHub.proxy_class = TraefikTomlProxy - -# mark the proxy as externally managed -c.TraefikTomlProxy.should_start = False - -# indicate the proxy url to allow register new routes -c.TraefikProxy.traefik_api_url = os.environ.get('PROXY_API_URL') or 'http://reverse-proxy:8099' - -# traefik api endpoint login password -c.TraefikTomlProxy.traefik_api_password = 'admin' - -# traefik api endpoint login username -c.TraefikTomlProxy.traefik_api_username = 'api_admin' - -# traefik's dynamic configuration file -c.TraefikTomlProxy.toml_dynamic_config_file = '/etc/traefik/rules.toml' - -########################################## -# END REVERSE PROXY -########################################## - -########################################## -# BEGIN GENERAL AUTHENTICATION -########################################## - -admin_user = os.environ.get('JUPYTERHUB_ADMIN_USER') -# Add other admin users as needed -c.Authenticator.admin_users = { - admin_user, -} - -# If using an authenticator which requires additional logic, -# set to True. 
-c.Authenticator.enable_auth_state = True - -########################################## -# END GENERAL AUTHENTICATION -########################################## - -########################################## -# BEGIN GENERAL SPAWNER -########################################## - -# End user image -c.Spawner.image = os.environ.get('DOCKER_END_USER_IMAGE') or 'illumidesk/full-notebook:latest' - -# Limit the container's use of CPU -cpu_limit = os.environ.get('SPAWNER_CPU_LIMIT') or 0.5 -c.DockerSpawner.cpu_limit = float(cpu_limit) - -# Limit memory -c.Spawner.mem_limit = os.environ.get('SPAWNER_MEM_LIMIT') or '2G' - -########################################## -# END GENERAL SPAWNER -########################################## - -########################################## -# BEGIN CUSTOM DOCKERSPAWNER -########################################## - -# Allow container to use any ip address -c.DockerSpawner.host_ip = '0.0.0.0' - -# specify the command used by the spawner to start every container -spawn_cmd = os.environ.get('DOCKER_SPAWN_CMD') or 'start-singleuser.sh' -c.DockerSpawner.extra_create_kwargs.update({'command': spawn_cmd}) - -# Tell the user containers to connect to our docker network -network_name = os.environ.get('DOCKER_NETWORK_NAME') or 'jupyter-network' -c.DockerSpawner.network_name = network_name - -# Remove containers when stopping the hub -c.DockerSpawner.remove_containers = True -c.DockerSpawner.remove = True - -# nbgrader exchange directory -exchange_dir = os.environ.get('EXCHANGE_DIR') or '/srv/nbgrader/exchange' - -# Organization name -org_name = os.environ.get('ORGANIZATION_NAME') or 'my-org' - -# Notebook directory within docker image -notebook_dir = os.environ.get('DOCKER_NOTEBOOK_DIR') - -# Root directory to mount org, home, and exchange folders -mnt_root = os.environ.get('MNT_ROOT') - -# Mount volumes -c.DockerSpawner.volumes = { - f'{mnt_root}/{org_name}' + '/home/{raw_username}': notebook_dir, - f'{mnt_root}/{org_name}/exchange': exchange_dir, 
-} - -c.DockerSpawner.name_template = 'jupyter-{raw_username}' - -# start the container with root so we can update uid/gid using the docker-stacks hooks -c.DockerSpawner.extra_create_kwargs.update({ 'user': 'root'}) - -# these env vars are set within the docker image but add them here for good measure -nb_non_grader_uid = os.environ.get('NB_NON_GRADER_UID') or '1000' -nb_gid = os.environ.get('NB_GSID') or '100' -c.DockerSpawner.environment = {'NB_UID': nb_non_grader_uid, 'NB_GID': nb_gid, 'NB_USER': 'jovyan'} - -# Get additional authentication data from the authenticator -c.DockerSpawner.auth_state_hook = custom_auth_state_hook - -# The customized pre_spawn_hook creates user folders for non system users -c.DockerSpawner.pre_spawn_hook = custom_pre_spawn_hook - -########################################## -# END CUSTOM DOCKERSPAWNER -########################################## diff --git a/ansible/roles/launch/tasks/main.yml b/ansible/roles/launch/tasks/main.yml deleted file mode 100644 index aec17bcb..00000000 --- a/ansible/roles/launch/tasks/main.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -- name: launch with docker-compose # noqa 301 - command: docker-compose up -d - args: - chdir: "{{ working_dir }}" diff --git a/ansible/roles/reverse-proxy/files/rules.toml b/ansible/roles/reverse-proxy/files/rules.toml deleted file mode 100644 index e69de29b..00000000 diff --git a/ansible/roles/reverse-proxy/files/traefik.toml b/ansible/roles/reverse-proxy/files/traefik.toml deleted file mode 100644 index 251501e9..00000000 --- a/ansible/roles/reverse-proxy/files/traefik.toml +++ /dev/null @@ -1,44 +0,0 @@ -logLevel = "DEBUG" -# the default entrypoint -defaultentrypoints = ["http"] - -# the api entrypoint -[api] -dashboard = true -entrypoint = "auth_api" - -# websockets protocol -[wss] -protocol = "http" - - -[retry] -attempts = 400 - -[forwardingTimeouts] - -# dialTimeout is the amount of time to wait until a connection to a backend server can be established. 
-# -# Optional -# Default: "30s" -dialTimeout = "200s" - -[respondingTimeouts] - -# idleTimeout is the maximum duration an idle (keep-alive) connection will remain idle before closing itself. -# This needs to be set longer than the GCP load balancer timeout -idleTimeout = "620s" - - -# the port on localhost where traefik accepts http requests -[entryPoints.http] -address = ":8000" - -# the port on localhost where the traefik api and dashboard can be found -[entryPoints.auth_api] -address = ":8099" - -# the dynamic configuration file -[file] -filename = "/etc/traefik/rules.toml" -watch = true \ No newline at end of file diff --git a/ansible/roles/reverse-proxy/tasks/main.yml b/ansible/roles/reverse-proxy/tasks/main.yml deleted file mode 100644 index 539272e2..00000000 --- a/ansible/roles/reverse-proxy/tasks/main.yml +++ /dev/null @@ -1,16 +0,0 @@ ---- -- name: create the proxy working directory - file: - path: "{{ working_dir }}/reverse-proxy" - state: directory - -- name: copy the traefik configuration file - copy: - src: traefik.toml - dest: "{{ working_dir }}/reverse-proxy/traefik.toml" - -- name: copy the rules.toml file - copy: - src: rules.toml - dest: "{{ working_dir }}/reverse-proxy/rules.toml" - mode: '0755' diff --git a/ansible/roles/setup_course/files/requirements.txt b/ansible/roles/setup_course/files/requirements.txt deleted file mode 100644 index 5effb18d..00000000 --- a/ansible/roles/setup_course/files/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -docker-compose==1.26.2 -Hypercorn==0.10.1 -quart==0.12.0 -psycopg2-binary==2.8.6 diff --git a/ansible/roles/setup_course/tasks/main.yml b/ansible/roles/setup_course/tasks/main.yml deleted file mode 100644 index d9afe7ee..00000000 --- a/ansible/roles/setup_course/tasks/main.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -- name: create setup-course dockerfile from template - template: - src: Dockerfile.setup-course.j2 - dest: "{{ working_dir }}/Dockerfile.setup-course" - -- name: copy requirements.txt - copy: - 
src: requirements.txt - dest: "{{ working_dir }}/setup-course-requirements.txt" - -- name: create the setup course env var file from template - template: - src: "env.setup_course.j2" - dest: "{{ working_dir }}/env.setup_course" - -- name: build setup-course microservice image - docker_image: - name: illumidesk/setup-course - build: - path: "{{ working_dir }}" - dockerfile: "{{ docker_setup_course_dockerfile }}" - pull: no - source: build - force_source: yes diff --git a/ansible/roles/setup_course/templates/Dockerfile.setup-course.j2 b/ansible/roles/setup_course/templates/Dockerfile.setup-course.j2 deleted file mode 100644 index a0cc32ae..00000000 --- a/ansible/roles/setup_course/templates/Dockerfile.setup-course.j2 +++ /dev/null @@ -1,22 +0,0 @@ -FROM python:3.8-buster - -SHELL [ "/bin/bash", "-c" ] - -WORKDIR /tmp - -# Install illumidesk package -COPY illumidesk.zip /tmp/illumidesk.zip -RUN python3 -m pip install --no-cache /tmp/illumidesk.zip \ - && rm /tmp/illumidesk.zip - -COPY setup-course-requirements.txt /tmp/requirements.txt -RUN python3 -m pip install --no-cache-dir \ - -r /tmp/requirements.txt - -WORKDIR /usr/src/app - -EXPOSE 8000 - -CMD ["hypercorn", "--bind", "0.0.0.0:8000", "--workers", "1", "illumidesk.setup_course.app:app"] - -HEALTHCHECK CMD curl --fail http://localhost:8000/config || exit 1 diff --git a/ansible/roles/setup_course/templates/env.setup_course.j2 b/ansible/roles/setup_course/templates/env.setup_course.j2 deleted file mode 100644 index 7a125e2a..00000000 --- a/ansible/roles/setup_course/templates/env.setup_course.j2 +++ /dev/null @@ -1,21 +0,0 @@ -# grader service image -DOCKER_GRADER_IMAGE={{docker_illumidesk_grader_image}} - -# organization name used with nfs -ORGANIZATION_NAME={{org_name}} - -# api token should be equal to value set for jupyterhub -JUPYTERHUB_API_TOKEN={{jhub_api_token}} - -# base url for jupyterhub application -JUPYTERHUB_BASE_URL={{base_url}} - -# jupyterhub api url using proxy name and the proxy's external port 
-JUPYTERHUB_API_URL=http://reverse-proxy:8000{{base_url}}/hub/api - -# local path to save the dynamic config -JUPYTERHUB_CONFIG_PATH=/srv/jupyterhub -JUPYTERHUB_SERVICE_NAME=jupyterhub - -# Grader user and group idsS -NB_GRADER_UID=10001 diff --git a/ansible/roles/workspaces/tasks/main.yml b/ansible/roles/workspaces/tasks/main.yml deleted file mode 100644 index f454abe3..00000000 --- a/ansible/roles/workspaces/tasks/main.yml +++ /dev/null @@ -1,10 +0,0 @@ ---- -- name: pull end-user notebook image - docker_image: - name: "{{ docker_illumidesk_end_user_image }}" - source: pull - -- name: pull grader notebook image - docker_image: - name: "{{ docker_illumidesk_grader_image }}" - source: pull diff --git a/dev-requirements.in b/dev-requirements.in index 682dcc39..2faeda63 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -1,8 +1,11 @@ --c requirements.txt black==20.8b1 codecov==2.1.9 coverage==5.3 flake8==3.8.4 +flask==1.1.2 +flask-sqlalchemy==2.4.4 +gunicorn==20.0.4 +kubernetes==12.0.0 mock==4.0.2 pip-tools==5.4.0 pre-commit==2.9.2 @@ -12,5 +15,4 @@ pytest-cov==2.10.1 pytest-html==3.0.0 pytest-metadata==1.10.0 pytest-mock==3.3.1 -quart==0.13.1 requests==2.24.0 diff --git a/dev-requirements.txt b/dev-requirements.txt index 0fece83b..0d9acffd 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,63 +4,68 @@ # # pip-compile dev-requirements.in # -aiofiles==0.5.0 # via quart appdirs==1.4.4 # via black, virtualenv attrs==20.3.0 # via pytest black==20.8b1 # via -r dev-requirements.in -blinker==1.4 # via quart -certifi==2020.11.8 # via requests +cachetools==4.2.0 # via google-auth +certifi==2020.11.8 # via kubernetes, requests cfgv==3.2.0 # via pre-commit chardet==3.0.4 # via requests -click==7.1.2 # via black, pip-tools, quart +click==7.1.2 # via black, flask, pip-tools codecov==2.1.9 # via -r dev-requirements.in coverage==5.3 # via -r dev-requirements.in, codecov, pytest-cov distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv 
flake8==3.8.4 # via -r dev-requirements.in -h11==0.11.0 # via hypercorn, wsproto -h2==4.0.0 # via hypercorn -hpack==4.0.0 # via h2 -hypercorn==0.11.0 # via quart -hyperframe==6.0.0 # via h2 +flask-sqlalchemy==2.4.4 # via -r dev-requirements.in +flask==1.1.2 # via -r dev-requirements.in, flask-sqlalchemy +google-auth==1.24.0 # via kubernetes +gunicorn==20.0.4 # via -r dev-requirements.in identify==1.5.5 # via pre-commit idna==2.10 # via requests iniconfig==1.0.1 # via pytest -itsdangerous==1.1.0 # via quart -jinja2==2.11.2 # via -c requirements.txt, quart -markupsafe==1.1.1 # via -c requirements.txt, jinja2 +itsdangerous==1.1.0 # via flask +jinja2==2.11.2 # via flask +kubernetes==12.0.0 # via -r dev-requirements.in +markupsafe==1.1.1 # via jinja2 mccabe==0.6.1 # via flake8 mock==4.0.2 # via -r dev-requirements.in mypy-extensions==0.4.3 # via black nodeenv==1.5.0 # via pre-commit -packaging==20.4 # via -c requirements.txt, pytest +oauthlib==3.1.0 # via requests-oauthlib +packaging==20.4 # via pytest pathspec==0.8.0 # via black pip-tools==5.4.0 # via -r dev-requirements.in pluggy==0.13.1 # via pytest pre-commit==2.9.2 # via -r dev-requirements.in -priority==1.3.0 # via hypercorn py==1.9.0 # via pytest +pyasn1-modules==0.2.8 # via google-auth +pyasn1==0.4.8 # via pyasn1-modules, rsa pycodestyle==2.6.0 # via flake8 pyflakes==2.2.0 # via flake8 -pyparsing==2.4.7 # via -c requirements.txt, packaging +pyparsing==2.4.7 # via packaging pytest-asyncio==0.14.0 # via -r dev-requirements.in pytest-cov==2.10.1 # via -r dev-requirements.in pytest-html==3.0.0 # via -r dev-requirements.in pytest-metadata==1.10.0 # via -r dev-requirements.in, pytest-html pytest-mock==3.3.1 # via -r dev-requirements.in pytest==6.1.2 # via -r dev-requirements.in, pytest-asyncio, pytest-cov, pytest-html, pytest-metadata, pytest-mock -pyyaml==5.3.1 # via -c requirements.txt, pre-commit -quart==0.13.1 # via -r dev-requirements.in +python-dateutil==2.8.1 # via kubernetes +pyyaml==5.3.1 # via kubernetes, 
pre-commit regex==2020.9.27 # via black -requests==2.24.0 # via -r dev-requirements.in, codecov -six==1.15.0 # via -c requirements.txt, packaging, pip-tools, virtualenv -toml==0.10.1 # via black, hypercorn, pre-commit, pytest, quart +requests-oauthlib==1.3.0 # via kubernetes +requests==2.24.0 # via -r dev-requirements.in, codecov, kubernetes, requests-oauthlib +rsa==4.6 # via google-auth +six==1.15.0 # via google-auth, kubernetes, packaging, pip-tools, python-dateutil, virtualenv, websocket-client +sqlalchemy==1.3.20 # via flask-sqlalchemy +toml==0.10.1 # via black, pre-commit, pytest typed-ast==1.4.1 # via black typing-extensions==3.7.4.3 # via black -urllib3==1.25.11 # via requests +urllib3==1.25.11 # via kubernetes, requests virtualenv==20.0.33 # via pre-commit -werkzeug==1.0.1 # via quart -wsproto==0.15.0 # via hypercorn +websocket-client==0.57.0 # via kubernetes +werkzeug==1.0.1 # via flask # The following packages are considered to be unsafe in a requirements file: # pip +# setuptools diff --git a/pytest.ini b/pytest.ini index 452cf35d..34bd619c 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,7 @@ [pytest] -testpaths = src/tests +testpaths = + src/illumidesk/tests + src/grader-service/tests python_files = test_*.py markers = group: mark as a test for groups diff --git a/requirements.in b/requirements.in deleted file mode 100644 index 8f7d9fb1..00000000 --- a/requirements.in +++ /dev/null @@ -1 +0,0 @@ -ansible==2.10.3 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index d65a62ce..00000000 --- a/requirements.txt +++ /dev/null @@ -1,140 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --generate-hashes -# -ansible-base==2.10.3 \ - --hash=sha256:35a208726b10fecbcf00c263ae4572b48f505b5796fb77a85c3e9c1036ea5e4f \ - # via ansible -ansible==2.10.3 \ - --hash=sha256:eb1d08b9b98a60e90e7123a12f40770780f29f9d73168da55d449106a9f4d348 \ - # via -r requirements.in -cffi==1.14.4 \ - 
--hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \ - --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \ - --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \ - --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \ - --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \ - --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \ - --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \ - --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \ - --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \ - --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \ - --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \ - --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \ - --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \ - --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \ - --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \ - --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \ - --hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \ - --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \ - --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \ - --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \ - --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \ - --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \ - --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \ - --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \ - 
--hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \ - --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \ - --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \ - --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \ - --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \ - --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \ - --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \ - --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \ - --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \ - --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f \ - # via cryptography -cryptography==3.2.1 \ - --hash=sha256:07ca431b788249af92764e3be9a488aa1d39a0bc3be313d826bbec690417e538 \ - --hash=sha256:13b88a0bd044b4eae1ef40e265d006e34dbcde0c2f1e15eb9896501b2d8f6c6f \ - --hash=sha256:32434673d8505b42c0de4de86da8c1620651abd24afe91ae0335597683ed1b77 \ - --hash=sha256:3cd75a683b15576cfc822c7c5742b3276e50b21a06672dc3a800a2d5da4ecd1b \ - --hash=sha256:4e7268a0ca14536fecfdf2b00297d4e407da904718658c1ff1961c713f90fd33 \ - --hash=sha256:545a8550782dda68f8cdc75a6e3bf252017aa8f75f19f5a9ca940772fc0cb56e \ - --hash=sha256:55d0b896631412b6f0c7de56e12eb3e261ac347fbaa5d5e705291a9016e5f8cb \ - --hash=sha256:5849d59358547bf789ee7e0d7a9036b2d29e9a4ddf1ce5e06bb45634f995c53e \ - --hash=sha256:6dc59630ecce8c1f558277ceb212c751d6730bd12c80ea96b4ac65637c4f55e7 \ - --hash=sha256:7117319b44ed1842c617d0a452383a5a052ec6aa726dfbaffa8b94c910444297 \ - --hash=sha256:75e8e6684cf0034f6bf2a97095cb95f81537b12b36a8fedf06e73050bb171c2d \ - --hash=sha256:7b8d9d8d3a9bd240f453342981f765346c87ade811519f98664519696f8e6ab7 \ - --hash=sha256:a035a10686532b0587d58a606004aa20ad895c60c4d029afa245802347fab57b \ - 
--hash=sha256:a4e27ed0b2504195f855b52052eadcc9795c59909c9d84314c5408687f933fc7 \ - --hash=sha256:a733671100cd26d816eed39507e585c156e4498293a907029969234e5e634bc4 \ - --hash=sha256:a75f306a16d9f9afebfbedc41c8c2351d8e61e818ba6b4c40815e2b5740bb6b8 \ - --hash=sha256:bd717aa029217b8ef94a7d21632a3bb5a4e7218a4513d2521c2a2fd63011e98b \ - --hash=sha256:d25cecbac20713a7c3bc544372d42d8eafa89799f492a43b79e1dfd650484851 \ - --hash=sha256:d26a2557d8f9122f9bf445fc7034242f4375bd4e95ecda007667540270965b13 \ - --hash=sha256:d3545829ab42a66b84a9aaabf216a4dce7f16dbc76eb69be5c302ed6b8f4a29b \ - --hash=sha256:d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3 \ - --hash=sha256:efe15aca4f64f3a7ea0c09c87826490e50ed166ce67368a68f315ea0807a20df \ - # via ansible-base -jinja2==2.11.2 \ - --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \ - --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \ - # via ansible-base -markupsafe==1.1.1 \ - --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \ - --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \ - --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \ - --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \ - --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \ - --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \ - --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \ - --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \ - --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \ - --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \ - --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \ - --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \ - 
--hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \ - --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \ - --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \ - --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \ - --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \ - --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \ - --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \ - --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \ - --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \ - --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \ - --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \ - --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \ - --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \ - --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \ - --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \ - --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \ - --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \ - --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \ - --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \ - --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \ - --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \ - # via jinja2 -packaging==20.4 \ - --hash=sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8 \ - --hash=sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181 \ - # via ansible-base -pycparser==2.20 \ - 
--hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \ - --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 \ - # via cffi -pyparsing==2.4.7 \ - --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ - --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \ - # via packaging -pyyaml==5.3.1 \ - --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ - --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ - --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ - --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ - --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ - --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ - --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ - --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ - --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ - --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ - --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ - # via ansible-base -six==1.15.0 \ - --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ - --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ - # via cryptography, packaging diff --git a/src/grader-service/README.md b/src/grader-service/README.md new file mode 100644 index 00000000..4b52ac9f --- /dev/null +++ b/src/grader-service/README.md @@ -0,0 +1,15 @@ +# IllumiDesk's Grader Setup Service + +## Overview + +Microservice used to setup new shared grader notebooks. + +## Dev Install + +Install in editable mode: + + python3 -m pip install -e . 
+ +## Update Dependencies + + pip-compile diff --git a/src/_version.py b/src/grader-service/_version.py similarity index 97% rename from src/_version.py rename to src/grader-service/_version.py index e82a899f..c4ab52f2 100644 --- a/src/_version.py +++ b/src/grader-service/_version.py @@ -3,8 +3,8 @@ # for now, update the version so that its the same as the one reflected # within the repo's root package.json version_info = ( + 1, 0, - 10, 0, ) __version__ = ".".join(map(str, version_info[:3])) diff --git a/src/grader-service/grader-service/__init__.py b/src/grader-service/grader-service/__init__.py new file mode 100644 index 00000000..7d2908cf --- /dev/null +++ b/src/grader-service/grader-service/__init__.py @@ -0,0 +1,35 @@ +# (C) Copyright IllumiDesk, LLC, 2020. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +from flask import Flask + +import os + +from .models import db + + +project_dir = os.path.dirname(os.path.abspath(__file__)) +database_file = "sqlite:///{}".format(os.path.join(project_dir, "gradersetup.db.sqlite3")) + + +def create_app(): + """Creates the grader setup service as a Flask application using SQLite as the database with the SQLAlchemy ORM. 
+ + Returns: + flask_app: the Flask application object + """ + flask_app = Flask(__name__) + flask_app.config['SQLALCHEMY_DATABASE_URI'] = database_file + flask_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + flask_app.app_context().push() + db.init_app(flask_app) + db.create_all() + return flask_app diff --git a/src/illumidesk/setup_course/constants.py b/src/grader-service/grader-service/constants.py similarity index 50% rename from src/illumidesk/setup_course/constants.py rename to src/grader-service/grader-service/constants.py index c07caf17..470e8283 100644 --- a/src/illumidesk/setup_course/constants.py +++ b/src/grader-service/grader-service/constants.py @@ -1,9 +1,18 @@ +# (C) Copyright IllumiDesk, LLC, 2020. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ NBGRADER_HOME_CONFIG_TEMPLATE = """ c = get_config() - c.CourseDirectory.root = '/home/{grader_name}/{course_id}' -c.CourseDirectory.course_id = '{course_id}' c.ClearSolutions.code_stub = {{ "python": "# your code here\\nraise NotImplementedError", "javascript": "// your code here\\nthrow new Error();", @@ -16,7 +25,6 @@ NBGRADER_COURSE_CONFIG_TEMPLATE = """ c = get_config() - c.CourseDirectory.course_id = '{course_id}' c.IncludeHeaderFooter.header = 'source/header.ipynb' c.IncludeHeaderFooter.footer = 'source/footer.ipynb' diff --git a/src/grader-service/grader-service/grader_service.py b/src/grader-service/grader-service/grader_service.py new file mode 100644 index 00000000..23ba75b1 --- /dev/null +++ b/src/grader-service/grader-service/grader_service.py @@ -0,0 +1,318 @@ +# (C) Copyright IllumiDesk, LLC, 2020. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +import logging +import os +import shutil +import sys + +from datetime import datetime + +from kubernetes import client +from kubernetes import config +from kubernetes.config import ConfigException + +from pathlib import Path +from secrets import token_hex +from .constants import NBGRADER_HOME_CONFIG_TEMPLATE +from .constants import NBGRADER_COURSE_CONFIG_TEMPLATE + + +logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) +logger = logging.getLogger(__name__) + +# namespace to deploy new pods +NAMESPACE = os.environ.get('ILLUMIDESK_K8S_NAMESPACE', 'default') +# image name for grader-notebooks +GRADER_IMAGE_NAME = os.environ.get('GRADER_IMAGE_NAME', 'illumidesk/grader-notebook:latest') +# mount root path for grader and course home directories +MNT_ROOT = os.environ.get('ILLUMIDESK_MNT_ROOT', '/illumidesk-courses') +# shared directory to use with students and instructors +EXCHANGE_MNT_ROOT = os.environ.get('ILLUMIDESK_NB_EXCHANGE_MNT_ROOT', '/illumidesk-nb-exchange') +GRADER_PVC = os.environ.get('GRADER_PVC', 'grader-setup-pvc') +GRADER_EXCHANGE_SHARED_PVC = os.environ.get('GRADER_SHARED_PVC', 'exchange-shared-volume') + +# user UI and GID to use within the grader container +NB_UID = 10001 +NB_GID = 100 + +# NBGrader DATABASE settings to save in nbgrader_config.py file +nbgrader_db_host = os.environ.get('POSTGRES_NBGRADER_HOST') +nbgrader_db_password = os.environ.get('POSTGRES_NBGRADER_PASSWORD') +nbgrader_db_user = os.environ.get('POSTGRES_NBGRADER_USER') +nbgrader_db_port = os.environ.get('POSTGRES_NBGRADER_PORT') +nbgrader_db_name = os.environ.get('POSTGRES_NBGRADER_DB_NAME') + + +class GraderServiceLauncher: + def __init__(self, org_name: str, course_id: str): + """ + Helper class to launch grader notebooks within the kubernetes cluster + + Args: + org_name: the organization name + course_id: the course id + + Raises: + ConfigException if the kubectl python client does not have a valid configuration set. 
+ """ + try: + # try to load the cluster credentials + # Configs can be set in Configuration class directly or using helper utility + config.load_incluster_config() + except ConfigException: + # next method uses the KUBECONFIG env var by default + config.load_kube_config() + # Uncomment the following lines to enable debug logging + c = client.Configuration() + c.debug = False + apps_v1 = client.AppsV1Api(api_client=client.ApiClient(configuration=c)) + self.apps_v1 = client.AppsV1Api() + self.coreV1Api = client.CoreV1Api() + self.course_id = course_id + self.org_name = org_name + self.grader_name = f'grader-{self.course_id}' + self.grader_token = token_hex(32) + # Course home directory, its parent should be the grader name + self.course_dir = Path(f'{MNT_ROOT}/{self.org_name}/home/grader-{self.course_id}/{self.course_id}') + # set the exchange directory path + self.exchange_dir = Path(EXCHANGE_MNT_ROOT, self.org_name, 'exchange') + + def grader_deployment_exists(self) -> bool: + """Check if there is a deployment for the grader service name + """ + # Filter deployments by the current namespace and a specific name (metadata collection) + deployment_list = self.apps_v1.list_namespaced_deployment( + namespace=NAMESPACE, + field_selector=f'metadata.name={self.grader_name}' + ) + if deployment_list and deployment_list.items: + return True + + return False + + def grader_service_exists(self) -> bool: + """Check if the grader service exists + """ + # Filter deployments by the current namespace and a specific name (metadata collection) + service_list = self.coreV1Api.list_namespaced_service( + namespace=NAMESPACE, + field_selector=f'metadata.name={self.grader_name}' + ) + if service_list and service_list.items: + return True + + return False + + def create_grader_deployment(self): + """Deploy the grader service + """ + # first create the home directories for grader/course + try: + self._create_exchange_directory() + self._create_grader_directories() + 
self._create_nbgrader_files() + except Exception as e: + msg = 'An error occurred trying to create directories and files for nbgrader.' + logger.error(f'{msg}{e}') + raise Exception(msg) + + # Create grader deployement + deployment = self._create_deployment_object() + api_response = self.apps_v1.create_namespaced_deployment(body=deployment, namespace=NAMESPACE) + logger.info(f'Deployment created. Status="{str(api_response.status)}"') + # Create grader service + service = self._create_service_object() + self.coreV1Api.create_namespaced_service(namespace=NAMESPACE, body=service) + + def _create_exchange_directory(self): + """Creates the exchange directory in the file system and sets permissions. + """ + logger.info(f'Creating exchange directory {self.exchange_dir}') + self.exchange_dir.mkdir(parents=True, exist_ok=True) + self.exchange_dir.chmod(0o777) + + def _create_grader_directories(self): + """ + Creates home directories with specific permissions + Directories to create: + - grader_root: //home/grader- + - course_root: //home/grader-/ + """ + logger.debug( + f'Create course directory "{self.course_dir}" with special permissions {NB_UID}:{NB_GID}' + ) + self.course_dir.mkdir(parents=True, exist_ok=True) + # change the course directory owner + shutil.chown(str(self.course_dir), user=NB_UID, group=NB_GID) + # change the grader-home directory owner + shutil.chown(str(self.course_dir.parent), user=NB_UID, group=NB_GID) + + def _create_nbgrader_files(self): + """Creates nbgrader configuration files used in the grader's home directory and the + course directory located within the grader's home directory. 
+ """ + # create the .jupyter directory (a child of grader_root) + jupyter_dir = self.course_dir.parent.joinpath('.jupyter') + jupyter_dir.mkdir(parents=True, exist_ok=True) + shutil.chown(str(jupyter_dir), user=NB_UID, group=NB_GID) + # Write the nbgrader_config.py file at grader home directory + grader_nbconfig_path = jupyter_dir.joinpath('nbgrader_config.py') + logger.info(f'Writing the nbgrader_config.py file at jupyter directory (within the grader home): {grader_nbconfig_path}') + # write the file + grader_home_nbconfig_content = NBGRADER_HOME_CONFIG_TEMPLATE.format( + grader_name=self.grader_name, + course_id=self.course_id, + db_url=f'postgresql://{nbgrader_db_user}:{nbgrader_db_password}@{nbgrader_db_host}:5432/{self.org_name}_{self.course_id}' + ) + grader_nbconfig_path.write_text(grader_home_nbconfig_content) + # Write the nbgrader_config.py file at grader home directory + course_nbconfig_path = self.course_dir.joinpath('nbgrader_config.py') + logger.info(f'Writing the nbgrader_config.py file at course home directory: {course_nbconfig_path}') + # write the second file + course_home_nbconfig_content = NBGRADER_COURSE_CONFIG_TEMPLATE.format( + course_id=self.course_id + ) + course_nbconfig_path.write_text(course_home_nbconfig_content) + + def _create_service_object(self): + """Creates the grader setup service as a valid kubernetes service for persistence. 
+ + Returns: + V1Service: a kubernetes service object that represents the grader service + """ + service = client.V1Service( + kind='Service', + metadata=client.V1ObjectMeta(name=self.grader_name), + spec=client.V1ServiceSpec( + type='ClusterIP', + ports=[client.V1ServicePort(port=8888, target_port=8888, protocol='TCP')], + selector={'component': self.grader_name} + ) + ) + return service + + def _create_deployment_object(self): + """Creates the deployment object for the grader service using environment variables + + Returns: + V1Deployment: a valid kubernetes deployment object + """ + # Configureate Pod template container + # Volumes to mount as subPaths of PV + sub_path_grader_home = str(self.course_dir.parent).strip('/') + sub_path_exchange = str(self.exchange_dir.relative_to(EXCHANGE_MNT_ROOT)) + # define the container to launch + container = client.V1Container( + name='grader-notebook', + image=GRADER_IMAGE_NAME, + command=['start-notebook.sh', f'--group=formgrade-{self.course_id}'], + ports=[client.V1ContainerPort(container_port=8888)], + working_dir=f'/home/{self.grader_name}', + resources=client.V1ResourceRequirements( + requests={"cpu": "100m", "memory": "200Mi"}, limits={"cpu": "500m", "memory": "1G"} + ), + security_context=client.V1SecurityContext(allow_privilege_escalation=False), + env=[ + client.V1EnvVar(name='JUPYTERHUB_SERVICE_NAME', value=self.course_id), + client.V1EnvVar(name='JUPYTERHUB_API_TOKEN', value=self.grader_token), + # we're using the K8s Service name 'hub' (defined in the jhub helm chart) + # to connect from our grader-notebooks + client.V1EnvVar(name='JUPYTERHUB_API_URL', value='http://hub:8081/hub/api'), + client.V1EnvVar(name='JUPYTERHUB_BASE_URL', value='/'), + client.V1EnvVar(name='JUPYTERHUB_SERVICE_PREFIX', value=f'/services/{self.course_id}/'), + client.V1EnvVar(name='JUPYTERHUB_CLIENT_ID', value=f'service-{self.course_id}'), + client.V1EnvVar(name='JUPYTERHUB_USER', value=self.grader_name), + client.V1EnvVar(name='NB_UID', 
value=str(NB_UID)), + client.V1EnvVar(name='NB_GID', value=str(NB_GID)), + client.V1EnvVar(name='NB_USER', value=self.grader_name), + ], + volume_mounts=[ + client.V1VolumeMount( + mount_path=f'/home/{self.grader_name}', + name=GRADER_PVC, + sub_path=sub_path_grader_home + ), + client.V1VolumeMount( + mount_path='/srv/nbgrader/exchange', + name=GRADER_EXCHANGE_SHARED_PVC, + sub_path=sub_path_exchange + ) + ] + ) + # Create and configurate a spec section + template = client.V1PodTemplateSpec( + metadata=client.V1ObjectMeta( + labels={ + 'component': self.grader_name, + 'app': 'illumidesk'} + ), + spec=client.V1PodSpec( + containers=[container], + security_context=client.V1PodSecurityContext(run_as_user=0), + volumes=[ + client.V1Volume( + name=GRADER_PVC, + persistent_volume_claim=client.V1PersistentVolumeClaimVolumeSource(claim_name=GRADER_PVC) + ), + client.V1Volume( + name=GRADER_EXCHANGE_SHARED_PVC, + persistent_volume_claim=client.V1PersistentVolumeClaimVolumeSource(claim_name=GRADER_EXCHANGE_SHARED_PVC) + ), + + ] + ) + ) + # Create the specification of deployment + spec = client.V1DeploymentSpec( + replicas=1, template=template, selector={'matchLabels': {'component': self.grader_name}} + ) + # Instantiate the deployment object + deployment = client.V1Deployment( + api_version="apps/v1", kind="Deployment", metadata=client.V1ObjectMeta(name=self.grader_name), spec=spec + ) + + return deployment + + def delete_grader_deployment(self): + """Deletes the grader deployment + """ + # first delete the service + if self.grader_service_exists(): + self.coreV1Api.delete_namespaced_service(name=self.grader_name, namespace=NAMESPACE) + # then delete the deployment + if self.grader_deployment_exists(): + self.apps_v1.delete_namespaced_deployment(name=self.grader_name, namespace=NAMESPACE) + + def update_jhub_deployment(self): + """Executes a patch in the jhub deployment. 
With this the jhub will be replaced with a new pod + """ + jhub_deployments = self.apps_v1.list_namespaced_deployment( + namespace=NAMESPACE, + label_selector='component=hub' + ) + if jhub_deployments.items: + # add new label with the current datetime (only used to the replacement occurs) + for deployment in jhub_deployments.items: + # get the jhub deployment template + current_metadata = deployment.spec.template.metadata + current_labels = current_metadata.labels + # add the label + current_labels.update({'restarted_at': datetime.now().strftime('%m_%d_%Y_%H_%M_%S')}) + current_metadata.labels = current_labels + # update the deployment object + deployment.spec.template.metatada = current_metadata + api_response = self.apps_v1.patch_namespaced_deployment( + name='hub', + namespace=NAMESPACE, + body=deployment + ) + logger.info(f'Jhub patch response:{api_response}') diff --git a/src/grader-service/grader-service/main.py b/src/grader-service/grader-service/main.py new file mode 100644 index 00000000..fa492f8a --- /dev/null +++ b/src/grader-service/grader-service/main.py @@ -0,0 +1,170 @@ +# (C) Copyright IllumiDesk, LLC, 2020. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +import logging +import os +import shutil +import sys + +from flask import jsonify + +from pathlib import Path + +from . 
import create_app +from .models import db +from .models import GraderService +from .grader_service import GraderServiceLauncher +from .grader_service import NB_UID +from .grader_service import NB_GID + + +logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) +logger = logging.getLogger(__name__) + +app = create_app() + + +@app.route('/services/<org_name>/<course_id>', methods=['POST']) +def launch(org_name: str, course_id: str): + """ + Creates a new grader-notebook pod if not exists + + Args: + org_name: the organization name + course_id: the grader's course id (label) + + Returns: + JSON: True/False on whether or not the grader service was successfully launched + + example: + ``` + { + success: "True" + } + ``` + """ + launcher = GraderServiceLauncher(org_name=org_name, course_id=course_id) + if not launcher.grader_deployment_exists(): + try: + launcher.create_grader_deployment() + # Register the new service to local database + new_service = GraderService( + name=course_id, + course_id=course_id, + url=f'http://{launcher.grader_name}:8888', + api_token=launcher.grader_token + ) + db.session.add(new_service) + db.session.commit() + # then do patch for jhub deployment + # with this the jhub pod will be restarted and get/load new services + launcher.update_jhub_deployment() + except Exception as e: + return jsonify(success=False, message=str(e)), 500 + + return jsonify(success=True) + else: + return jsonify(success=False, message=f'A grader service already exists for this course_id:{course_id}'), 409 + + +@app.route('/services', methods=['GET']) +def services(): + """ + Returns the grader-notebook list used as services defined in the JupyterHub config. + + Returns: + JSON: a list of service dictionaries with the name and url and the groups associated + to the grader service.
 + + example: + ``` + { + services: [{"name":":8888"...}], + groups: {"formgrade-<course_id>": ["grader-<course_id>"] } + } + ``` + """ + services = GraderService.query.all() + # format a json + services_resp = [] + groups_resp = {} + for s in services: + services_resp.append({ + 'name': s.name, + 'url': s.url, + 'oauth_no_confirm': s.oauth_no_confirm, + 'admin': s.admin, + 'api_token': s.api_token + }) + # add the jhub user group + groups_resp.update({f'formgrade-{s.course_id}': [f'grader-{s.course_id}']}) + return jsonify(services=services_resp, groups=groups_resp) + + +@app.route("/services/<org_name>/<course_id>", methods=['DELETE']) +def services_deletion(org_name: str, course_id: str): + """Deletes the grader setup service + + Args: + org_name (str): the organization name + course_id (str): the course id (label) + + Returns: + JSON: True if the grader was successfully deleted false otherwise + """ + launcher = GraderServiceLauncher(org_name=org_name, course_id=course_id) + try: + launcher.delete_grader_deployment() + service_saved = GraderService.query.filter_by(course_id=course_id).first() + if service_saved: + db.session.delete(service_saved) + db.session.commit() + return jsonify(success=True) + except Exception as e: + return jsonify(success=False, error=str(e)), 500 + + +@app.route("/courses/<org_name>/<course_id>/<assignment_name>", methods=['POST']) +def assignment_dir_creation(org_name: str, course_id: str, assignment_name: str): + """Creates the directories required to manage assignments.
+ + Args: + org_name (str): the organization name + course_id (str): the course id (label) + assignment_name (str): the assignment name + + Returns: + JSON: True if the assignment directories were successfully created, false otherwise + """ + launcher = GraderServiceLauncher(org_name=org_name, course_id=course_id) + assignment_dir = os.path.abspath(Path(launcher.course_dir, 'source', assignment_name)) + if not os.path.isdir(assignment_dir): + logger.info('Creating source dir %s for the assignment %s' % (assignment_dir, assignment_name)) + os.makedirs(assignment_dir) + logger.info('Fixing folder permissions for %s' % assignment_dir) + shutil.chown(str(Path(assignment_dir).parent), user=NB_UID, group=NB_GID) + shutil.chown(str(assignment_dir), user=NB_UID, group=NB_GID) + + return jsonify(success=True) + + +@app.route("/healthcheck") +def healthcheck(): + """Healtheck endpoint + + Returns: + JSON: True if the service is alive + """ + return jsonify(success=True) + + +if __name__ == "__main__": + app.run(host='0.0.0.0') diff --git a/src/grader-service/grader-service/models.py b/src/grader-service/grader-service/models.py new file mode 100644 index 00000000..f5efc967 --- /dev/null +++ b/src/grader-service/grader-service/models.py @@ -0,0 +1,41 @@ +# (C) Copyright IllumiDesk, LLC, 2020. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
 + +import flask_sqlalchemy + +db = flask_sqlalchemy.SQLAlchemy() + + +class GraderService(db.Model): + """Base model for the grader setup service that inherits from the base SQLAlchemy Model class. + + Attrs: + id: the grader setup service primary key + name: the grader setup service name + course_id: the course id (label) referenced when calling services + url: the grader setup service's URL (endpoint) + admin: admin privileges as defined by the JupyterHub's services configuration option + api_token: the token used to access the JupyterHub so that it is run as an externally managed service + + Returns: + The grader Service object's name and url properties + """ + __tablename__ = 'grader_services' + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(60), unique=True, nullable=False) + course_id = db.Column(db.String(50), nullable=False) + url = db.Column(db.String(100), nullable=False) + oauth_no_confirm = db.Column(db.Boolean, default=True) + admin = db.Column(db.Boolean, default=True) + api_token = db.Column(db.String(150), nullable=True) + + def __repr__(self): + return "<GraderService name: {} url: {}>".format(self.name, self.url) diff --git a/src/grader-service/grader-service/wsgi.py b/src/grader-service/grader-service/wsgi.py new file mode 100644 index 00000000..8155bdbc --- /dev/null +++ b/src/grader-service/grader-service/wsgi.py @@ -0,0 +1,15 @@ +# (C) Copyright IllumiDesk, LLC, 2020. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License.
+ +from .main import app + +if __name__ == "__main__": + app.run() diff --git a/src/grader-service/requirements.txt b/src/grader-service/requirements.txt new file mode 100644 index 00000000..1480d3a2 --- /dev/null +++ b/src/grader-service/requirements.txt @@ -0,0 +1,35 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile +# +cachetools==4.2.0 # via google-auth +certifi==2020.12.5 # via kubernetes, requests +chardet==3.0.4 # via requests +click==7.1.2 # via flask +flask-sqlalchemy==2.4.4 # via illumidesk-grader-setup-service (setup.py) +flask==1.1.2 # via flask-sqlalchemy, illumidesk-grader-setup-service (setup.py) +google-auth==1.24.0 # via kubernetes +gunicorn==20.0.4 # via illumidesk-grader-setup-service (setup.py) +idna==2.10 # via requests +itsdangerous==1.1.0 # via flask +jinja2==2.11.2 # via flask +kubernetes==12.0.0 # via illumidesk-grader-setup-service (setup.py) +markupsafe==1.1.1 # via jinja2 +oauthlib==3.1.0 # via requests-oauthlib +pyasn1-modules==0.2.8 # via google-auth +pyasn1==0.4.8 # via pyasn1-modules, rsa +python-dateutil==2.8.1 # via kubernetes +pyyaml==5.3.1 # via kubernetes +requests-oauthlib==1.3.0 # via kubernetes +requests==2.25.0 # via kubernetes, requests-oauthlib +rsa==4.6 # via google-auth +six==1.15.0 # via google-auth, kubernetes, python-dateutil, websocket-client +sqlalchemy==1.3.20 # via flask-sqlalchemy +urllib3==1.26.2 # via kubernetes, requests +websocket-client==0.57.0 # via kubernetes +werkzeug==1.0.1 # via flask + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/src/grader-service/setup.py b/src/grader-service/setup.py new file mode 100644 index 00000000..4846be49 --- /dev/null +++ b/src/grader-service/setup.py @@ -0,0 +1,46 @@ +import os +import sys + +from setuptools import setup +from setuptools import find_packages + + +v = sys.version_info +if v[:2] < (3, 6): + error = 'ERROR: IllumiDesk requires Python version 3.6 or above.' 
 + print(error, file=sys.stderr) + sys.exit(1) + +shell = False +if os.name in ('nt', 'dos'): + shell = True + warning = 'WARNING: Windows is not officially supported' + print(warning, file=sys.stderr) + +# Get the current package version. +here = os.path.abspath(os.path.dirname(__file__)) +version_ns = {} +with open(os.path.join(here, '_version.py')) as f: + exec(f.read(), {}, version_ns) + +setup( + name='illumidesk-grader-setup-service', + version=version_ns['__version__'], + description='IllumiDesk grader setup service package', + long_description=open('README.md').read(), + long_description_content_type='text/markdown', + url='https://github.com/illumidesk/illumidesk', + author='The IllumiDesk Team', + author_email='hello@illumidesk.com', + license='Apache 2.0', + packages=find_packages(exclude=['tests']), + install_requires=[ + 'flask==1.1.2', + 'flask-sqlalchemy==2.4.4', + 'gunicorn==20.0.4', + 'kubernetes==12.0.0', + ], # noqa: E231 + package_data={ + '': ['*.html'], + }, # noqa: E231 +) diff --git a/src/README.md b/src/illumidesk/README.md similarity index 57% rename from src/README.md rename to src/illumidesk/README.md index e137b7bb..197c11ca 100644 --- a/src/README.md +++ b/src/illumidesk/README.md @@ -1,9 +1,8 @@ -# illumidesk +# IllumiDesk Custom JupyterHub LTI Authenticators ## Overview -- JupyterHub Authenticators -- JupyterHub Spawners +- JupyterHub compatible Authenticators - JupyterHub REST API client ## Dev Install diff --git a/src/illumidesk/_version.py b/src/illumidesk/_version.py new file mode 100644 index 00000000..c4ab52f2 --- /dev/null +++ b/src/illumidesk/_version.py @@ -0,0 +1,13 @@ +"""illumidesk version info""" + +# for now, update the version so that its the same as the one reflected +# within the repo's root package.json +version_info = ( + 1, + 0, + 0, +) +__version__ = ".".join(map(str, version_info[:3])) + +if len(version_info) > 3: + __version__ = "%s%s" % (__version__, version_info[3]) diff --git
a/src/illumidesk/apis/setup_course_service.py b/src/illumidesk/apis/setup_course_service.py deleted file mode 100644 index 552a7155..00000000 --- a/src/illumidesk/apis/setup_course_service.py +++ /dev/null @@ -1,78 +0,0 @@ -import json -import logging -import os - -from tornado.httpclient import AsyncHTTPClient - -from typing import Dict - -import requests - - -# course setup service name -INTENAL_SERVICE_NAME = os.environ.get('DOCKER_SETUP_COURSE_SERVICE_NAME') or 'setup-course' -# course setup service port -SERVICE_PORT = os.environ.get('DOCKER_SETUP_COURSE_PORT') or '8000' - - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -SERVICE_BASE_URL = f'http://{INTENAL_SERVICE_NAME}:{SERVICE_PORT}' -SERVICE_COMMON_HEADERS = {'Content-Type': 'application/json'} - - -def get_current_service_definitions() -> str: - """ - Gets the file content that contains the new services and groups that are used as grader services - - Returns: the contents of configuration file - """ - # get the response from service config endpoint - response = requests.get(f'{SERVICE_BASE_URL}/config') - # store course setup configuration - config = response.json() - return config - - -async def register_new_service(data: Dict[str, str]) -> str: - """ - Helps to register (asynchronously) new course definition through the setup-course service - Args: - data: a dict with the org, course_id (label) and the domain. 
- - Example: - ```await SetupCourseService.register_new_service(data = { - 'org': org, - 'course_id': course_id, - 'domain': handler.request.host, - })``` - - Returns: the response as json - - """ - client = AsyncHTTPClient() - - response = await client.fetch( - SERVICE_BASE_URL, - headers=SERVICE_COMMON_HEADERS, - body=json.dumps(data), - method='POST', - ) - if not response.body: - raise json.JSONDecodeError('The setup course response body is empty', '', 0) - resp_json = json.loads(response.body) - logger.debug(f'Setup-Course service response: {resp_json}') - return resp_json - - -def make_rolling_update() -> None: - """ - Triggers the rolling-update request BUT without wait for the response. - It's very important to understand that we not have to wait 'cause the current process/jupyterhub will be killed - """ - client = AsyncHTTPClient() - url = f'{SERVICE_BASE_URL}/rolling-update' - # WE'RE NOT USING <<>> because the rolling update should occur later - client.fetch(url, headers=SERVICE_COMMON_HEADERS, body='', method='POST') diff --git a/src/illumidesk/__init__.py b/src/illumidesk/illumidesk/__init__.py similarity index 100% rename from src/illumidesk/__init__.py rename to src/illumidesk/illumidesk/__init__.py diff --git a/src/illumidesk/apis/__init__.py b/src/illumidesk/illumidesk/apis/__init__.py similarity index 100% rename from src/illumidesk/apis/__init__.py rename to src/illumidesk/illumidesk/apis/__init__.py diff --git a/src/illumidesk/apis/announcement_service.py b/src/illumidesk/illumidesk/apis/announcement_service.py similarity index 92% rename from src/illumidesk/apis/announcement_service.py rename to src/illumidesk/illumidesk/apis/announcement_service.py index 5c2d677f..e985d439 100644 --- a/src/illumidesk/apis/announcement_service.py +++ b/src/illumidesk/illumidesk/apis/announcement_service.py @@ -33,4 +33,4 @@ async def add_announcement(message: str) -> None: headers['Authorization'] = f'token {jupyterhub_api_token}' body_data = {'announcement': 
message} client = AsyncHTTPClient() - await client.fetch(ANNOUNCEMENT_INTERNAL_URL, headers=headers, body=json.dumps(body_data), method='POST') + client.fetch(ANNOUNCEMENT_INTERNAL_URL, headers=headers, body=json.dumps(body_data), method='POST') diff --git a/src/illumidesk/apis/jupyterhub_api.py b/src/illumidesk/illumidesk/apis/jupyterhub_api.py similarity index 100% rename from src/illumidesk/apis/jupyterhub_api.py rename to src/illumidesk/illumidesk/apis/jupyterhub_api.py diff --git a/src/illumidesk/apis/nbgrader_service.py b/src/illumidesk/illumidesk/apis/nbgrader_service.py similarity index 85% rename from src/illumidesk/apis/nbgrader_service.py rename to src/illumidesk/illumidesk/apis/nbgrader_service.py index e1ebb238..b62994af 100644 --- a/src/illumidesk/apis/nbgrader_service.py +++ b/src/illumidesk/illumidesk/apis/nbgrader_service.py @@ -1,7 +1,5 @@ import logging import os -from pathlib import Path -import shutil from illumidesk.authenticators.utils import LTIUtils @@ -22,6 +20,7 @@ nbgrader_db_port = os.environ.get('POSTGRES_NBGRADER_PORT') or 5432 nbgrader_db_password = os.environ.get('POSTGRES_NBGRADER_PASSWORD') nbgrader_db_user = os.environ.get('POSTGRES_NBGRADER_USER') +mnt_root = os.environ.get('ILLUMIDESK_MNT_ROOT', '/illumidesk-courses') org_name = os.environ.get('ORGANIZATION_NAME') or 'my-org' @@ -61,7 +60,7 @@ def __init__(self, course_id: str, check_database_exists: bool = False): raise ValueError('course_id missing') self.course_id = LTIUtils().normalize_string(course_id) - self.course_dir = f'/home/grader-{self.course_id}/{self.course_id}' + self.course_dir = f'{mnt_root}/{org_name}/home/grader-{self.course_id}/{self.course_id}' self.uid = int(os.environ.get('NB_GRADER_UID') or '10001') self.gid = int(os.environ.get('NB_GID') or '100') @@ -116,7 +115,7 @@ def get_course(self) -> Course: logger.debug(f'course got from db:{course}') return course - def create_assignment_in_nbgrader(self, assignment_name: str, **kwargs: dict) -> Assignment: + 
def register_assignment(self, assignment_name: str, **kwargs: dict) -> Assignment: """ Adds an assignment to nbgrader database @@ -127,20 +126,12 @@ def create_assignment_in_nbgrader(self, assignment_name: str, **kwargs: dict) -> """ if not assignment_name: raise ValueError('assignment_name missing') - assignment_name = LTIUtils().normalize_string(assignment_name) logger.debug('Assignment name normalized %s to save in gradebook' % assignment_name) assignment = None with Gradebook(self.db_url, course_id=self.course_id) as gb: try: assignment = gb.update_or_create_assignment(assignment_name, **kwargs) logger.debug('Added assignment %s to gradebook' % assignment_name) - assignment_dir = os.path.abspath(Path(self.course_dir, 'source', assignment_name)) - if not os.path.isdir(assignment_dir): - logger.debug('Creating source dir %s for the assignment %s' % (assignment_dir, assignment_name)) - os.makedirs(assignment_dir) - logger.debug('Fixing folder permissions for %s' % assignment_dir) - shutil.chown(str(Path(assignment_dir).parent), user=self.uid, group=self.gid) - shutil.chown(str(assignment_dir), user=self.uid, group=self.gid) except InvalidEntry as e: logger.debug('Error ocurred by adding assignment to gradebook: %s' % e) return assignment diff --git a/src/illumidesk/illumidesk/apis/setup_course_service.py b/src/illumidesk/illumidesk/apis/setup_course_service.py new file mode 100644 index 00000000..404b9128 --- /dev/null +++ b/src/illumidesk/illumidesk/apis/setup_course_service.py @@ -0,0 +1,69 @@ +import logging +import os + +from tornado.httpclient import AsyncHTTPClient +from tornado.httpclient import HTTPError + +from traitlets.traitlets import Bool + + +# course setup service name +INTENAL_SERVICE_NAME = os.environ.get('SETUP_COURSE_SERVICE_NAME') or 'grader-setup-service' +# course setup service port +SERVICE_PORT = os.environ.get('SETUP_COURSE_PORT') or '8000' + + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + + +SERVICE_BASE_URL = 
f'http://{INTENAL_SERVICE_NAME}:{SERVICE_PORT}' +SERVICE_COMMON_HEADERS = {'Content-Type': 'application/json'} + + +async def create_assignment_source_dir(org_name: str, course_id: str, assignment_name: str) -> Bool: + """ + Calls the grader setup service to create the assignment source directory + + returns: True when the service response is 200 + """ + client = AsyncHTTPClient() + try: + response = await client.fetch( + f'{SERVICE_BASE_URL}/courses/{org_name}/{course_id}/{assignment_name}', + headers=SERVICE_COMMON_HEADERS, + body='', + method='POST', + ) + logger.debug(f'Grader-setup service response: {response.body}') + return True + except HTTPError as e: + # HTTPError is raised for non-200 responses + logger.error(f'Grader-setup service returned an error: {e}') + return False + + +async def register_new_service(org_name: str, course_id: str) -> Bool: + """ + Registers (asynchronously) a new course definition through the grader setup service + Args: + org_name: organization name + course_id: the course name detected in the request args + Returns: True when a new deployment was launched (k8s) otherwise False + + """ + client = AsyncHTTPClient() + try: + response = await client.fetch( + f'{SERVICE_BASE_URL}/services/{org_name}/{course_id}', + headers=SERVICE_COMMON_HEADERS, + body='', + method='POST', + ) + logger.debug(f'Grader-setup service response: {response.body}') + return True + except HTTPError as e: + # HTTPError is raised for non-200 responses + # the response can be found in e.response.
+ logger.error(f'Grader-setup service returned an error: {e}') + return False diff --git a/src/illumidesk/authenticators/__init__.py b/src/illumidesk/illumidesk/authenticators/__init__.py similarity index 100% rename from src/illumidesk/authenticators/__init__.py rename to src/illumidesk/illumidesk/authenticators/__init__.py diff --git a/src/illumidesk/authenticators/authenticator.py b/src/illumidesk/illumidesk/authenticators/authenticator.py similarity index 94% rename from src/illumidesk/authenticators/authenticator.py rename to src/illumidesk/illumidesk/authenticators/authenticator.py index 692eb491..aec7acf3 100644 --- a/src/illumidesk/authenticators/authenticator.py +++ b/src/illumidesk/illumidesk/authenticators/authenticator.py @@ -20,8 +20,8 @@ from illumidesk.apis.jupyterhub_api import JupyterHubAPI from illumidesk.apis.announcement_service import AnnouncementService from illumidesk.apis.nbgrader_service import NbGraderServiceHelper -from illumidesk.apis.setup_course_service import make_rolling_update from illumidesk.apis.setup_course_service import register_new_service +from illumidesk.apis.setup_course_service import create_assignment_source_dir from illumidesk.authenticators.handlers import LTI11AuthenticateHandler from illumidesk.authenticators.handlers import LTI13LoginHandler @@ -38,6 +38,11 @@ logger.setLevel(logging.DEBUG) +ORG_NAME = os.environ.get('ORGANIZATION_NAME') or 'my-org' +if not ORG_NAME: + raise EnvironmentError('ORGANIZATION_NAME env-var is not set') + + async def setup_course_hook( authenticator: Authenticator, handler: RequestHandler, authentication: Dict[str, str] ) -> Dict[str, str]: @@ -62,9 +67,6 @@ async def setup_course_hook( lti_utils = LTIUtils() jupyterhub_api = JupyterHubAPI() - org = os.environ.get('ORGANIZATION_NAME') - if not org: - raise EnvironmentError('ORGANIZATION_NAME env-var is not set') # normalize the name and course_id strings in authentication dictionary course_id = 
lti_utils.normalize_string(authentication['auth_state']['course_id']) nb_service = NbGraderServiceHelper(course_id) @@ -80,20 +82,14 @@ async def setup_course_hook( elif user_is_an_instructor(user_role): # assign the user in 'formgrade-' group await jupyterhub_api.add_instructor_to_jupyterhub_group(course_id, username) - data = { - 'org': org, - 'course_id': course_id, - 'domain': handler.request.host, - } - setup_response = await register_new_service(data) + # launch the new (?) grader-notebook as a service + setup_response = await register_new_service(org_name=ORG_NAME, course_id=course_id) # In case of new courses launched then execute a rolling update with jhub to reload our configuration file - if 'is_new_setup' in setup_response and setup_response['is_new_setup'] is True: + if setup_response is True: # notify the user the browser needs to be reload (when traefik redirects to a new jhub) await AnnouncementService.add_announcement('A new service was detected, please reload this page...') - - logger.debug('The current jupyterhub instance will be updated by setup-course service...') - make_rolling_update() + logger.debug('The current jupyterhub instance will be updated by grader setup service...') return authentication @@ -158,10 +154,10 @@ async def authenticate(self, handler: BaseHandler, data: Dict[str, str] = None) lms_vendor = args['tool_consumer_info_product_family_code'] # We use the course_id to setup the grader service notebook. Since this service - # runs as a docker container we need to normalize the string so we can use it + # runs as a container we need to normalize the string so we can use it # as a container name. 
if 'context_label' in args and args['context_label']: - course_id = lti_utils.normalize_string(args['context_label']) + course_id = args['context_label'] self.log.debug('Course context_label normalized to: %s' % course_id) else: raise HTTPError(400, 'Course label not included in the LTI request') @@ -259,7 +255,7 @@ async def authenticate(self, handler: BaseHandler, data: Dict[str, str] = None) self.log.debug( 'Creating a new assignment from the Authentication flow with title %s' % assignment_name ) - nbgrader_service.create_assignment_in_nbgrader(assignment_name) + nbgrader_service.register_assignment(assignment_name) # ensure the user name is normalized username_normalized = lti_utils.normalize_string(username) self.log.debug('Assigned username is: %s' % username_normalized) @@ -387,7 +383,7 @@ async def authenticate( # noqa: C901 ] # if there is a resource link request then process additional steps if not validator.is_deep_link_launch(jwt_decoded): - process_additional_steps_for_resource_launch(self.log, course_id, jwt_decoded) + await process_resource_link(self.log, course_id, jwt_decoded) lms_user_id = jwt_decoded['sub'] if 'sub' in jwt_decoded else username @@ -406,10 +402,8 @@ async def authenticate( # noqa: C901 } -def process_additional_steps_for_resource_launch( - logger: Any, - course_id: str, - jwt_body_decoded: Dict[str, Any], +async def process_resource_link( + logger: Any, course_id: str, jwt_body_decoded: Dict[str, Any], ) -> None: """ Executes additional processes with the claims that come only with LtiResourceLinkRequest @@ -426,6 +420,9 @@ def process_additional_steps_for_resource_launch( nbgrader_service = NbGraderServiceHelper(course_id, True) nbgrader_service.update_course(lms_lineitems_endpoint=course_lineitems) if resource_link_title: - # resource_link_title_normalize = lti_utils.normalize_string(resource_link_title) - logger.debug('Creating a new assignment from the Authentication flow with title %s' % resource_link_title) - 
nbgrader_service.create_assignment_in_nbgrader(resource_link_title) + assignment_name = LTIUtils().normalize_string(resource_link_title) + logger.debug('Creating a new assignment from the Authentication flow with title %s' % assignment_name) + # register the new assignment in nbgrader database + nbgrader_service.register_assignment(assignment_name) + # create the assignment source directory by calling the grader-setup service + await create_assignment_source_dir(ORG_NAME, course_id, assignment_name) diff --git a/src/illumidesk/authenticators/constants.py b/src/illumidesk/illumidesk/authenticators/constants.py similarity index 100% rename from src/illumidesk/authenticators/constants.py rename to src/illumidesk/illumidesk/authenticators/constants.py diff --git a/src/illumidesk/authenticators/handlers.py b/src/illumidesk/illumidesk/authenticators/handlers.py similarity index 100% rename from src/illumidesk/authenticators/handlers.py rename to src/illumidesk/illumidesk/authenticators/handlers.py diff --git a/src/illumidesk/authenticators/templates/file-select.html b/src/illumidesk/illumidesk/authenticators/templates/file-select.html similarity index 100% rename from src/illumidesk/authenticators/templates/file-select.html rename to src/illumidesk/illumidesk/authenticators/templates/file-select.html diff --git a/src/illumidesk/authenticators/utils.py b/src/illumidesk/illumidesk/authenticators/utils.py similarity index 96% rename from src/illumidesk/authenticators/utils.py rename to src/illumidesk/illumidesk/authenticators/utils.py index b78f8ee1..a0e3257f 100644 --- a/src/illumidesk/authenticators/utils.py +++ b/src/illumidesk/illumidesk/authenticators/utils.py @@ -37,16 +37,16 @@ def normalize_string(self, name: str) -> str: if not name: raise ValueError('Name is empty') # truncate name after 30th character - name = (name[:30] + '') if len(name) > 30 else name + name = (name[:25] + '') if len(name) > 30 else name # remove special characters name = re.sub(r'[^\w-]+', 
'', name) # if the first character is any of _.- remove it name = name.lstrip('_.-') # convert to lower case name = name.lower() - # limit course_id to 22 characters, since its used for o/s username + # limit course_id to 25 characters, since its used for o/s username # in jupyter/docker-stacks compatible grader notebook (NB_USER) - normalized_name = name[0:20] + normalized_name = name[0:25] self.log.debug('String normalized to %s' % normalized_name) return normalized_name diff --git a/src/illumidesk/authenticators/validator.py b/src/illumidesk/illumidesk/authenticators/validator.py similarity index 100% rename from src/illumidesk/authenticators/validator.py rename to src/illumidesk/illumidesk/authenticators/validator.py diff --git a/src/illumidesk/grades/__init__.py b/src/illumidesk/illumidesk/grades/__init__.py similarity index 100% rename from src/illumidesk/grades/__init__.py rename to src/illumidesk/illumidesk/grades/__init__.py diff --git a/src/illumidesk/grades/exceptions.py b/src/illumidesk/illumidesk/grades/exceptions.py similarity index 100% rename from src/illumidesk/grades/exceptions.py rename to src/illumidesk/illumidesk/grades/exceptions.py diff --git a/src/illumidesk/grades/handlers.py b/src/illumidesk/illumidesk/grades/handlers.py similarity index 100% rename from src/illumidesk/grades/handlers.py rename to src/illumidesk/illumidesk/grades/handlers.py diff --git a/src/illumidesk/grades/sender_controlfile.py b/src/illumidesk/illumidesk/grades/sender_controlfile.py similarity index 100% rename from src/illumidesk/grades/sender_controlfile.py rename to src/illumidesk/illumidesk/grades/sender_controlfile.py diff --git a/src/illumidesk/grades/senders.py b/src/illumidesk/illumidesk/grades/senders.py similarity index 100% rename from src/illumidesk/grades/senders.py rename to src/illumidesk/illumidesk/grades/senders.py diff --git a/src/illumidesk/lti13/__init__.py b/src/illumidesk/illumidesk/lti13/__init__.py similarity index 100% rename from 
src/illumidesk/lti13/__init__.py rename to src/illumidesk/illumidesk/lti13/__init__.py diff --git a/src/illumidesk/lti13/auth.py b/src/illumidesk/illumidesk/lti13/auth.py similarity index 100% rename from src/illumidesk/lti13/auth.py rename to src/illumidesk/illumidesk/lti13/auth.py diff --git a/src/illumidesk/lti13/handlers.py b/src/illumidesk/illumidesk/lti13/handlers.py similarity index 100% rename from src/illumidesk/lti13/handlers.py rename to src/illumidesk/illumidesk/lti13/handlers.py diff --git a/src/illumidesk/setup_course/__init__.py b/src/illumidesk/illumidesk/spawners/__init__.py similarity index 100% rename from src/illumidesk/setup_course/__init__.py rename to src/illumidesk/illumidesk/spawners/__init__.py diff --git a/src/illumidesk/spawners/hooks.py b/src/illumidesk/illumidesk/spawners/hooks.py similarity index 100% rename from src/illumidesk/spawners/hooks.py rename to src/illumidesk/illumidesk/spawners/hooks.py diff --git a/src/illumidesk/spawners/spawners.py b/src/illumidesk/illumidesk/spawners/spawners.py similarity index 60% rename from src/illumidesk/spawners/spawners.py rename to src/illumidesk/illumidesk/spawners/spawners.py index e5da76fd..b17127e2 100644 --- a/src/illumidesk/spawners/spawners.py +++ b/src/illumidesk/illumidesk/spawners/spawners.py @@ -1,10 +1,10 @@ -from dockerspawner import DockerSpawner +from kubespawner import KubeSpawner from traitlets.traitlets import Bool -class IllumiDeskDockerSpawner(DockerSpawner): - """Extends the DockerSpawner by defining the common behavior for our Spwaners that work +class IllumiDeskKubeSpawner(KubeSpawner): + """Extends the KubeSpawner by defining the common behavior for our Spawners that work with LTI versions 1.1 and 1.3 """ diff --git a/src/requirements.txt b/src/illumidesk/requirements.txt similarity index 57% rename from src/requirements.txt rename to src/illumidesk/requirements.txt index 507da649..e1421e5e 100644 --- a/src/requirements.txt +++ b/src/illumidesk/requirements.txt @@ -6,100
+6,108 @@ # alembic==1.4.3 # via jupyterhub, nbgrader argon2-cffi==20.1.0 # via notebook -async-generator==1.10 # via jupyterhub, nbclient -attrs==20.3.0 # via jsonschema +async-generator==1.10 # via jupyterhub, jupyterhub-kubespawner, nbclient +attrs==20.2.0 # via jsonschema backcall==0.2.0 # via ipython bleach==3.2.1 # via nbconvert -certifi==2020.11.8 # via requests +cachetools==4.2.0 # via google-auth +certifi==2020.6.20 # via kubernetes, requests certipy==0.1.3 # via jupyterhub -cffi==1.14.4 # via argon2-cffi, cryptography +cffi==1.14.3 # via argon2-cffi, cryptography chardet==3.0.4 # via requests -cryptography==3.2.1 # via josepy, jwcrypto, pyopenssl +cryptography==3.1.1 # via josepy, jwcrypto, pyopenssl decorator==4.4.2 # via ipython defusedxml==0.6.0 # via nbconvert -docker==4.4.0 # via dockerspawner -dockerspawner==0.11.1 # via illumidesk (setup.py) entrypoints==0.3 # via jupyterhub, nbconvert -escapism==1.0.1 # via dockerspawner +escapism==1.0.1 # via jupyterhub-kubespawner filelock==3.0.12 # via illumidesk (setup.py) future==0.18.2 # via pyjwkest +google-auth==1.24.0 # via kubernetes httplib2==0.18.1 # via oauth2, pylti idna==2.10 # via requests ipykernel==5.3.4 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole ipython-genutils==0.2.0 # via nbformat, notebook, qtconsole, traitlets -ipython==7.19.0 # via ipykernel, ipywidgets, jupyter-console +ipython==7.18.1 # via ipykernel, ipywidgets, jupyter-console ipywidgets==7.5.1 # via jupyter jedi==0.17.2 # via ipython -jinja2==2.11.2 # via jupyterhub, nbconvert, notebook -josepy==1.5.0 # via illumidesk (setup.py) +jinja2==2.11.2 # via jupyterhub, jupyterhub-kubespawner, nbconvert, notebook +josepy==1.4.0 # via illumidesk (setup.py) jsonschema==3.2.0 # via jupyter-telemetry, nbformat, nbgrader jupyter-client==6.1.7 # via ipykernel, jupyter-console, nbclient, nbgrader, notebook, qtconsole jupyter-console==6.2.0 # via jupyter -jupyter-core==4.7.0 # via jupyter-client, nbconvert, nbformat, nbgrader, 
notebook, qtconsole +jupyter-core==4.6.3 # via jupyter-client, nbconvert, nbformat, nbgrader, notebook, qtconsole jupyter-telemetry==0.1.0 # via jupyterhub jupyter==1.0.0 # via nbgrader -jupyterhub-ltiauthenticator==0.4.0 # via illumidesk (setup.py) -jupyterhub==1.2.1 # via dockerspawner, illumidesk (setup.py), jupyterhub-ltiauthenticator, oauthenticator +jupyterhub-kubespawner==0.14.1 # via illumidesk (setup.py) +jupyterhub-ltiauthenticator==1.0.0 # via illumidesk (setup.py) +jupyterhub==1.1.0 # via illumidesk (setup.py), jupyterhub-kubespawner, jupyterhub-ltiauthenticator, oauthenticator jupyterlab-pygments==0.1.2 # via nbconvert jwcrypto==0.8 # via illumidesk (setup.py) +kubernetes==12.0.1 # via jupyterhub-kubespawner lti==0.9.5 # via illumidesk (setup.py) -lxml==4.6.2 # via lti +lxml==4.5.2 # via lti mako==1.1.3 # via alembic markupsafe==1.1.1 # via jinja2, mako mistune==0.8.4 # via nbconvert -nbclient==0.5.1 # via nbconvert +nbclient==0.5.0 # via nbconvert nbconvert==6.0.7 # via jupyter, nbgrader, notebook -nbformat==5.0.8 # via ipywidgets, nbclient, nbconvert, nbgrader, notebook +nbformat==5.0.7 # via ipywidgets, nbclient, nbconvert, nbgrader, notebook git+https://github.com/IllumiDesk/nbgrader#egg=nbgrader-0.64.2 # via illumidesk (setup.py) -nest-asyncio==1.4.3 # via nbclient -notebook==6.1.5 # via jupyter, nbgrader, widgetsnbextension +nest-asyncio==1.4.1 # via nbclient +notebook==6.1.4 # via jupyter, nbgrader, widgetsnbextension oauth2==1.9.0.post1 # via pylti -oauthenticator==0.12.1 # via illumidesk (setup.py) +oauthenticator==0.11.0 # via illumidesk (setup.py) oauthlib==3.1.0 # via jupyterhub, jupyterhub-ltiauthenticator, lti, requests-oauthlib packaging==20.4 # via bleach pamela==1.0.0 # via jupyterhub -pandocfilters==1.4.3 # via nbconvert +pandocfilters==1.4.2 # via nbconvert parso==0.7.1 # via jedi pem==20.1.0 # via illumidesk (setup.py) pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -prometheus-client==0.9.0 # via jupyterhub, notebook 
+prometheus-client==0.8.0 # via jupyterhub, notebook prompt-toolkit==3.0.7 # via ipython, jupyter-console psycopg2-binary==2.8.6 # via illumidesk (setup.py) ptyprocess==0.6.0 # via pexpect, terminado +pyasn1-modules==0.2.8 # via google-auth +pyasn1==0.4.8 # via pyasn1-modules, rsa pycparser==2.20 # via cffi -pycryptodome==3.9.9 # via illumidesk (setup.py) -pycryptodomex==3.9.9 # via pyjwkest -pygments==2.7.2 # via ipython, jupyter-console, jupyterlab-pygments, nbconvert, qtconsole +pycryptodome==3.9.8 # via illumidesk (setup.py) +pycryptodomex==3.9.8 # via pyjwkest +pygments==2.7.1 # via ipython, jupyter-console, jupyterlab-pygments, nbconvert, qtconsole pyjwkest==1.4.2 # via illumidesk (setup.py) pyjwt==1.7.1 # via illumidesk (setup.py) pylti==0.7.0 # via illumidesk (setup.py) -pyopenssl==20.0.0 # via certipy, josepy +pyopenssl==19.1.0 # via certipy, josepy pyparsing==2.4.7 # via packaging pyrsistent==0.17.3 # via jsonschema -python-dateutil==2.8.1 # via alembic, jupyter-client, jupyterhub, nbgrader +python-dateutil==2.8.1 # via alembic, jupyter-client, jupyterhub, kubernetes, nbgrader python-editor==1.0.4 # via alembic python-json-logger==2.0.0 # via jupyter-telemetry -pyzmq==20.0.0 # via jupyter-client, notebook, qtconsole -qtconsole==5.0.1 # via jupyter +python-slugify==4.0.1 # via jupyterhub-kubespawner +pyyaml==5.3.1 # via jupyterhub-kubespawner, kubernetes +pyzmq==19.0.2 # via jupyter-client, notebook, qtconsole +qtconsole==4.7.7 # via jupyter qtpy==1.9.0 # via qtconsole -rapidfuzz==0.13.3 # via nbgrader -requests-oauthlib==1.3.0 # via lti -requests==2.24.0 # via docker, jupyterhub, nbgrader, pyjwkest, requests-oauthlib +rapidfuzz==0.12.2 # via nbgrader +requests-oauthlib==1.3.0 # via kubernetes, lti +requests==2.24.0 # via jupyterhub, kubernetes, nbgrader, pyjwkest, requests-oauthlib +rsa==4.6 # via google-auth ruamel.yaml.clib==0.2.2 # via ruamel.yaml ruamel.yaml==0.16.12 # via jupyter-telemetry send2trash==1.5.0 # via notebook -six==1.15.0 # via 
argon2-cffi, bleach, cryptography, docker, josepy, jsonschema, packaging, pyjwkest, pylti, pyopenssl, python-dateutil, sqlalchemy-utils, websocket-client +six==1.15.0 # via argon2-cffi, bleach, cryptography, google-auth, josepy, jsonschema, kubernetes, packaging, pyjwkest, pylti, pyopenssl, python-dateutil, sqlalchemy-utils, websocket-client sqlalchemy-utils==0.36.8 # via illumidesk (setup.py) sqlalchemy==1.3.19 # via alembic, jupyterhub, nbgrader, sqlalchemy-utils terminado==0.9.1 # via notebook testpath==0.4.4 # via nbconvert -tornado==6.1 # via ipykernel, jupyter-client, jupyterhub, nbgrader, notebook, terminado -traitlets==5.0.5 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, jupyter-telemetry, jupyterhub, nbclient, nbconvert, nbformat, nbgrader, notebook, qtconsole -urllib3==1.26.2 # via requests +text-unidecode==1.3 # via python-slugify +tornado==6.0.4 # via ipykernel, jupyter-client, jupyterhub, nbgrader, notebook, terminado +traitlets==5.0.4 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, jupyter-telemetry, jupyterhub, nbclient, nbconvert, nbformat, nbgrader, notebook, qtconsole +urllib3==1.25.10 # via jupyterhub-kubespawner, kubernetes, requests wcwidth==0.2.5 # via prompt-toolkit webencodings==0.5.1 # via bleach -websocket-client==0.57.0 # via docker +websocket-client==0.57.0 # via kubernetes widgetsnbextension==3.5.1 # via ipywidgets # The following packages are considered to be unsafe in a requirements file: diff --git a/src/setup.py b/src/illumidesk/setup.py similarity index 91% rename from src/setup.py rename to src/illumidesk/setup.py index eea45dbf..d15f6277 100644 --- a/src/setup.py +++ b/src/illumidesk/setup.py @@ -33,14 +33,14 @@ url='https://github.com/illumidesk/illumidesk', author='The IllumiDesk Team', author_email='hello@illumidesk.com', - license='MIT', + license='Apache 2.0', packages=find_packages(exclude='./tests'), install_requires=[ - 'dockerspawner==0.11.1', 'filelock==3.0.12', 'josepy==1.4.0', 
- 'jupyterhub==1.2.1', - 'jupyterhub-ltiauthenticator==0.4.0', + 'jupyterhub==1.1.0', + 'jupyterhub-kubespawner==0.14.1', + 'jupyterhub-ltiauthenticator==1.0.0', 'jwcrypto==0.8', 'lti==0.9.5', 'nbgrader@git+https://github.com/IllumiDesk/nbgrader#egg=nbgrader-0.64.2', diff --git a/src/illumidesk/setup_course/app.py b/src/illumidesk/setup_course/app.py deleted file mode 100644 index f234e11b..00000000 --- a/src/illumidesk/setup_course/app.py +++ /dev/null @@ -1,110 +0,0 @@ -import asyncio -import json -import logging -import os -import sys - -from filelock import FileLock -from pathlib import Path - -from quart import Quart -from quart import request -from quart.exceptions import BadRequest - -from .course import Course -from .utils import SetupUtils - - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -logger = logging.getLogger(__name__) - -app = Quart("setup-course-app") - -configs_path = os.environ.get('JUPYTERHUB_CONFIG_PATH', '/srv/jupyterhub') - -Path(configs_path).mkdir(exist_ok=True, parents=True) - - -JSON_FILE_PATH = configs_path + '/jupyterhub_config.json' - -cache = {'services': [], 'load_groups': {}} - -with Path(JSON_FILE_PATH).open('w+') as config: - try: - cache = json.load(config) - except json.JSONDecodeError: - if Path(JSON_FILE_PATH).stat().st_size != 0: - raise - else: - json.dump(cache, config) - - -@app.route("/", methods=['POST']) -async def main(): - data = await request.get_json() - if data is None: - raise BadRequest() - logger.debug('Received data payload %s' % data) - try: - new_course = Course(**data) - await new_course.setup() - update_jupyterhub_config(new_course) - - except Exception as e: - logger.error("Unable to complete course setup", exc_info=True) - return {'error': 500, 'detail': str(e)} - return {'message': 'OK', 'is_new_setup': new_course.is_new_setup} - - -@app.route("/config", methods=['GET']) -def config(): - return json.dumps(cache) - - -@app.route("/rolling-update", methods=['POST']) -async def restart(): - 
logger.debug('Received request to make a rolling-update.') - utils = SetupUtils() - try: - logger.debug('Restarting jupyterhub...') - await asyncio.sleep(3) - utils.restart_jupyterhub() - except Exception as e: - logger.error("Unable to restart the container", exc_info=True) - return {'error': 500} - return {'message': 'OK'} - - -def update_jupyterhub_config(course: Course): - """ - We can add groups and users with the REST API, but not services. Therefore - add new services to the JupyterHub.services section within the jupyterhub - configuration file (jupyterhub_config.py). - - """ - jupyterhub_config_json = Path(JSON_FILE_PATH) - # Lock file to manage jupyterhub_config.py - jupyterhub_lock = os.environ.get('JUPYTERHUB_CONFIG_PATH') + '/jhub.lock' - new_service_config = course.get_service_config() - load_group = {f'formgrade-{course.course_id}': [course.grader_name]} - logger.debug(f'Course service definition: {new_service_config}') - - # find the service definition - current_service_definition = None - for service in cache['services']: - if service['url'] == new_service_config['url']: - logger.debug(f"service definition with url:{service['url']} found in json file") - current_service_definition = service - - if current_service_definition and course.is_new_setup: - logger.debug(f'Updating the api_token in service definition with: {course.token}') - # update the service definition with the newest token - current_service_definition['api_token'] = course.token - elif current_service_definition is None: - cache['services'].append(new_service_config) - - cache['load_groups'].update(load_group) - lock = FileLock(str(jupyterhub_lock)) - with lock: - with jupyterhub_config_json.open('r+') as config: - json.dump(cache, config) diff --git a/src/illumidesk/setup_course/course.py b/src/illumidesk/setup_course/course.py deleted file mode 100644 index 8cd20176..00000000 --- a/src/illumidesk/setup_course/course.py +++ /dev/null @@ -1,272 +0,0 @@ -import docker -import logging 
-import os -import shutil -import subprocess -import sys - -from pathlib import Path -from secrets import token_hex - -from illumidesk.apis.jupyterhub_api import JupyterHubAPI -from illumidesk.apis.nbgrader_service import NbGraderServiceHelper - -from .constants import NBGRADER_COURSE_CONFIG_TEMPLATE -from .constants import NBGRADER_HOME_CONFIG_TEMPLATE - - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -logger = logging.getLogger(__name__) - - -class Course: - """ - Class to manage new course setups. - - Attributes: - org: Organization name used in the account's sub-domain - course_id: The normalized course id. Must not contain more than - 30 characters or have special characters. - course_root: Course's root path - domain: Domain name from tool consumer that launched the request - exchange_root: Path for exchange folder - gid: Grader's group id - grader_name: Grader's account name - grader_root: Grader's home path - is_new_setup: True indicates a new setup, False otherwise - token: JupyterHub API token used to authenticat requests with the Hub - uid: Grader's user id - user_role: the user role set in the grader's share notebook - """ - - def __init__(self, org: str, course_id: str, domain: str): - self.org = org - self.course_id = course_id - self.domain = domain - self.exchange_root = Path(os.environ.get('MNT_ROOT'), self.org, 'exchange') - self.grader_name = f'grader-{course_id}' - self.grader_root = Path( - os.environ.get('MNT_ROOT'), - org, - 'home', - self.grader_name, - ) - self.grader_shared_folder = Path(os.environ.get('MNT_ROOT'), org, 'shared', self.course_id) - shared_folder_env = os.environ.get('SHARED_FOLDER_ENABLED') or 'False' - self.is_shared_folder_enabled = True if shared_folder_env.lower() in ('true', '1') else False - self.course_root = self.grader_root / course_id - self.token = token_hex(32) - self.client = docker.from_env() - self.uid = int(os.environ.get('NB_GRADER_UID')) - self.gid = int(os.environ.get('NB_GID')) - 
self.user_role = 'Grader' - self._is_new_setup = False - self.jupyterhub_api = JupyterHubAPI() - - @property - def jupyter_config_path(self): - return self.grader_root / '.jupyter' - - @property - def nbgrader_home_config_path(self): - return self.jupyter_config_path / 'nbgrader_config.py' - - @property - def nbgrader_course_config_path(self): - return self.course_root / 'nbgrader_config.py' - - @property - def is_new_setup(self): - return self._is_new_setup - - async def setup(self): - """ - Function to bootstrap new course setup - - Returns: - is_new_setup: boolean to indicate whether or not the this setup - function executed the functions to set up a new course. - """ - if self.should_setup(): - self.create_directories() - await self.add_jupyterhub_grader_group() - await self.add_jupyterhub_student_group() - self.run() - - def should_setup(self): - """ - If the grader container exists then the setup_course boolean is set to - false, otherwise true. - - Raises: - docker.errors.NotFound - """ - try: - self.client.containers.get(self.grader_name) - logger.debug('Grader container exists %s' % self.grader_name) - except docker.errors.NotFound: - logger.error('Grader container not found') - self._is_new_setup = True - return True - - return False - - def create_directories(self): - """ - Creates exchange, grader account, and course directories as well - as nbgrader configuration files. All directories and files are updated to have the - UID/GID that belong to the instructor/grader values. Students and Grader/Instructors - should have different UID's but the same GID. 
- """ - logger.debug('Creating exchange directory %s' % self.exchange_root) - self.exchange_root.mkdir(parents=True, exist_ok=True) - self.exchange_root.chmod(0o777) - logger.debug( - 'Creating grader directory and permissions with path %s to %s:%s ' % (self.grader_root, self.uid, self.gid) - ) - self.grader_root.mkdir(parents=True, exist_ok=True) - shutil.chown(str(self.grader_root), user=self.uid, group=self.gid) - self.course_root.mkdir(parents=True, exist_ok=True) - logger.debug( - 'Changing course directory permissions with path %s to %s:%s ' % (self.course_root, self.uid, self.gid) - ) - shutil.chown(str(self.course_root), user=self.uid, group=self.gid) - - logger.debug('Course jupyter config path %s' % self.jupyter_config_path) - self.jupyter_config_path.mkdir(parents=True, exist_ok=True) - shutil.chown(str(self.jupyter_config_path), user=self.uid, group=self.gid) - logger.debug('Change course jupyter config permissions to %s:%s' % (self.uid, self.gid)) - - logger.debug('Grader home nbgrader_config.py path %s' % self.nbgrader_home_config_path) - # format the config file with current settings/values and append the db_url setting - nbgrader_config = NBGRADER_HOME_CONFIG_TEMPLATE.format( - grader_name=self.grader_name, - course_id=self.course_id, - db_url=NbGraderServiceHelper(self.course_id).db_url, - ) - - self.nbgrader_home_config_path.write_text(nbgrader_config) - shutil.chown(str(self.nbgrader_home_config_path), user=self.uid, group=self.gid) - logger.debug( - 'Added shared grader home nbgrader config %s with permissions %s:%s' - % (nbgrader_config, self.uid, self.gid) - ) - - logger.debug('Grader course nbgrader_config.py path %s' % self.nbgrader_course_config_path) - nbgrader_config = NBGRADER_COURSE_CONFIG_TEMPLATE.format(course_id=self.course_id) - self.nbgrader_course_config_path.write_text(nbgrader_config) - shutil.chown(str(self.nbgrader_course_config_path), user=self.uid, group=self.gid) - logger.debug( - 'Added shared grader course nbgrader config 
%s with permissions %s:%s' - % (nbgrader_config, self.uid, self.gid) - ) - if self.is_shared_folder_enabled is True: - self.create_shared_directory() - - def create_shared_directory(self): - """ - Creates the shared folder and initialize the git repo - """ - git_init_commands = ''' - git init; - git config --local user.name "illumidesk-grader"; - git config --local user.email "grader@illumidesk.local"; - echo '.ipynb_checkpoints/' >> .gitignore; - git add .; - git commit -m "Initial commit"; - ''' - logger.debug('Creating shared directory %s' % self.grader_shared_folder) - self.grader_shared_folder.mkdir(parents=True, exist_ok=True) - shutil.chown(str(self.grader_shared_folder), user=self.uid, group=self.gid) - # initiate git repo - try: - # run git init - logger.info(f'Initializing git repo in shared directory: {self.grader_shared_folder}') - subprocess.check_output(git_init_commands, cwd=f'{self.grader_shared_folder}', shell=True) - except Exception as er: - logger.debug(f'Error initializing the git repo:{er}') - - async def add_jupyterhub_grader_group(self): - """ - Add formgrader group with JupyterHub's REST API by sending a - POST request to the the endpoint ../groups/formgrade-{course_id}. - - Returns: - Response from JupyterHub's add group endpoint - """ - group_name = f'formgrade-{self.course_id}' - logger.debug(f'Adding grader group {group_name} with JupyterHub REST API') - result = await self.jupyterhub_api.create_group(group_name) - logger.debug('Response object when adding formgrader group: %s' % result) - - async def add_jupyterhub_student_group(self): - """ - Add nbgrader group with JupyterHub's REST API by sending a - POST request to the the endpoint ../groups/nbgrader-{course_id}. 
- - Returns: - Response from JupyterHub's add group endpoint - """ - group_name = f'nbgrader-{self.course_id}' - logger.debug(f'Adding student group {group_name} with JupyterHub REST API') - result = await self.jupyterhub_api.create_group(group_name) - logger.debug('Response object when adding nbgrader group: %s' % result) - - def run(self): - """ - Create and run a grader notebook with the docker client. This service's settings - should coincide with the grader's JupyterHub.services definition. The JupyterHub.service - is defined as an externally managed service and the docker client is what manages this - grader service. - """ - logger.debug('Running grader container with exchange root %s' % self.exchange_root) - jupyterhub_api_url = os.environ.get('JUPYTERHUB_API_URL') - jupyterhub_api_token = os.environ.get('JUPYTERHUB_API_TOKEN') - base_url = os.environ.get('JUPYTERHUB_BASE_URL') or '' - logger.debug('Grader container JUPYTERHUB_API_URL set to %s' % jupyterhub_api_url) - logger.debug('Grader container JUPYTERHUB_API_TOKEN set to %s' % jupyterhub_api_token) - # set initial volumes dict - docker_volumes = { - str(self.grader_root): {'bind': f'/home/{self.grader_name}'}, - str(self.exchange_root): {'bind': '/srv/nbgrader/exchange'}, - } - if self.is_shared_folder_enabled: - docker_volumes[str(self.grader_shared_folder)] = {'bind': f'/home/{self.grader_name}/shared'} - self.client.containers.run( - detach=True, - image=os.environ.get('DOCKER_GRADER_IMAGE') or 'illumidesk/grader-notebook:latest', - command=['start-notebook.sh', f'--group=formgrade-{self.course_id}'], - environment=[ - f'JUPYTERHUB_SERVICE_NAME={self.course_id}', - f'JUPYTERHUB_API_TOKEN={self.token}', - f'JUPYTERHUB_API_URL={jupyterhub_api_url}', - f'JUPYTERHUB_BASE_URL={base_url}/', - f'JUPYTERHUB_SERVICE_PREFIX={base_url}/services/{self.course_id}/', - f'JUPYTERHUB_CLIENT_ID=service-{self.course_id}', - f'JUPYTERHUB_USER={self.grader_name}', - f'NB_GRADER_UID={self.uid}', - f'NB_GID={self.gid}', 
- f'NB_USER={self.grader_name}', - f'USER_ROLE={self.user_role}', - ], - volumes=docker_volumes, - name=self.grader_name, - user='root', - working_dir=f'/home/{self.grader_name}', - network=os.environ.get('DOCKER_NETWORK_NAME'), - restart_policy={'Name': 'on-failure', 'MaximumRetryCount': 5}, - ) - - def get_service_config(self) -> dict: - """ - Creates service config definition that is used in jupyterhub's services section - """ - url = f'http://{self.grader_name}:8888' - service_config = { - 'name': self.course_id, - 'url': url, - 'oauth_no_confirm': True, - 'admin': True, - 'api_token': self.token, - } - return service_config diff --git a/src/illumidesk/setup_course/utils.py b/src/illumidesk/setup_course/utils.py deleted file mode 100644 index 8a330c3b..00000000 --- a/src/illumidesk/setup_course/utils.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import logging -import subprocess -import sys -import time - -import docker -from docker.errors import NotFound - - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -logger = logging.getLogger(__name__) - - -class SetupUtils: - """ - Utils class used to manage course setup configurations and updates. - """ - - def __init__(self): - self.docker_client = docker.from_env() - self.jupyterhub_container_name = os.environ.get('JUPYTERHUB_SERVICE_NAME') or 'jupyterhub' - self.illumidesk_dir = os.environ.get('ILLUMIDESK_DIR') - if not self.illumidesk_dir: - raise EnvironmentError('Missing or null ILLUMIDESK_DIR env var value.') - - def restart_jupyterhub(self) -> None: - """ - Initiates a jupyterhubb rolling update. In order to load changes in configuration file, - the jupyterhub container is replaced with new one, then the older is stopped. - Traefik can redirect the traffic to new one service few seconds later. 
- """ - logger.debug('Received request to restart JupyterHub') - containers = self.docker_client.containers.list( - filters={'label': [f'com.docker.compose.service={self.jupyterhub_container_name}']} - ) - for container in containers: - logger.debug(f'Found a jupyterhub container (running): {container.id}') - try: - # launch a new one to be attached in the proxy - logger.info('Trying to scale jupyterhub with docker-compose') - subprocess.check_output( - f'docker-compose --compatibility up -d --scale {self.jupyterhub_container_name}=2'.split(), - cwd=f'{self.illumidesk_dir}', - ) - time.sleep(3) - logger.debug(f'The container: {container.id} is stopping...') - container.stop() - time.sleep(1) - except NotFound: - logger.error('Jupyterhub container not found, unable to proceed with rolling update.') - except Exception as er: - logger.error(f'Error trying to scale jupyterhub: {er}') - break - self.docker_client.containers.prune( - filters={'label': [f'com.docker.compose.service={self.jupyterhub_container_name}']} - ) - logger.debug(f'Pruning unused jupyterhub containers {self.jupyterhub_container_name}') diff --git a/src/illumidesk/spawners/__init__.py b/src/illumidesk/tests/__init__.py similarity index 100% rename from src/illumidesk/spawners/__init__.py rename to src/illumidesk/tests/__init__.py diff --git a/src/tests/__init__.py b/src/illumidesk/tests/illumidesk/__init__.py similarity index 100% rename from src/tests/__init__.py rename to src/illumidesk/tests/illumidesk/__init__.py diff --git a/src/tests/illumidesk/__init__.py b/src/illumidesk/tests/illumidesk/apis/__init__.py similarity index 100% rename from src/tests/illumidesk/__init__.py rename to src/illumidesk/tests/illumidesk/apis/__init__.py diff --git a/src/tests/illumidesk/apis/test_announcement_service.py b/src/illumidesk/tests/illumidesk/apis/test_announcement_service.py similarity index 100% rename from src/tests/illumidesk/apis/test_announcement_service.py rename to 
src/illumidesk/tests/illumidesk/apis/test_announcement_service.py diff --git a/src/tests/illumidesk/apis/test_jupyterhub_api.py b/src/illumidesk/tests/illumidesk/apis/test_jupyterhub_api.py similarity index 100% rename from src/tests/illumidesk/apis/test_jupyterhub_api.py rename to src/illumidesk/tests/illumidesk/apis/test_jupyterhub_api.py diff --git a/src/tests/illumidesk/apis/test_nbgrader_service_helper.py b/src/illumidesk/tests/illumidesk/apis/test_nbgrader_service_helper.py similarity index 51% rename from src/tests/illumidesk/apis/test_nbgrader_service_helper.py rename to src/illumidesk/tests/illumidesk/apis/test_nbgrader_service_helper.py index 55ca70c9..60e37179 100644 --- a/src/tests/illumidesk/apis/test_nbgrader_service_helper.py +++ b/src/illumidesk/tests/illumidesk/apis/test_nbgrader_service_helper.py @@ -1,6 +1,3 @@ -import os -from pathlib import Path -import shutil import pytest from unittest.mock import patch @@ -30,47 +27,6 @@ def test_course_id_is_normalized_in_the_constructor(self): """ assert self.sut.course_id == 'ps-one' - @patch('shutil.chown') - @patch('os.makedirs') - @patch('illumidesk.apis.nbgrader_service.Gradebook') - def test_create_assignment_in_nbgrader_uses_the_assignment_name_normalized( - self, mock_gradebook, mock_makedirs, mock_chown - ): - """ - Does the assignment is created with normalized value? - """ - self.sut.create_assignment_in_nbgrader('LAB 1') - assert mock_gradebook.return_value.__enter__.return_value.update_or_create_assignment.called - assert mock_gradebook.return_value.__enter__.return_value.update_or_create_assignment.call_args[0][0] == 'lab1' - - @patch('os.makedirs') - @patch('pathlib.Path.mkdir') - @patch('illumidesk.apis.nbgrader_service.Gradebook') - def test_create_assignment_in_nbgrader_method_fixes_source_directory_permissions( - self, mock_gradebook, mock_path_mkdir, mock_makedirs - ): - """ - Does the assignment source directory is created and it is fixed with the correct file permissions? 
- """ - with patch.object(shutil, 'chown') as mock_chown: - self.sut.create_assignment_in_nbgrader('lab-abc') - source_dir = os.path.abspath(Path(self.sut.course_dir, 'source')) - mock_chown.assert_any_call(source_dir, user=10001, group=100) - - @patch('os.makedirs') - @patch('pathlib.Path.mkdir') - @patch('illumidesk.apis.nbgrader_service.Gradebook') - def test_create_assignment_in_nbgrader_method_fixes_assignment_directory_permissions( - self, mock_gradebook, mock_path_mkdir, mock_makedirs - ): - """ - Does the assignment directory is fixed with the correct file permissions? - """ - with patch.object(shutil, 'chown') as mock_chown: - self.sut.create_assignment_in_nbgrader('lab-abc') - assignment_dir = os.path.abspath(Path(self.sut.course_dir, 'source', 'lab-abc')) - mock_chown.assert_any_call(assignment_dir, user=10001, group=100) - @patch('shutil.chown') @patch('pathlib.Path.mkdir') @patch('illumidesk.apis.nbgrader_service.Gradebook') diff --git a/src/tests/illumidesk/apis/__init__.py b/src/illumidesk/tests/illumidesk/apps/__init__.py similarity index 100% rename from src/tests/illumidesk/apis/__init__.py rename to src/illumidesk/tests/illumidesk/apps/__init__.py diff --git a/src/tests/illumidesk/apps/test_jupyterhub_base_config.py b/src/illumidesk/tests/illumidesk/apps/test_jupyterhub_base_config.py similarity index 92% rename from src/tests/illumidesk/apps/test_jupyterhub_base_config.py rename to src/illumidesk/tests/illumidesk/apps/test_jupyterhub_base_config.py index 8602046e..c48529ad 100644 --- a/src/tests/illumidesk/apps/test_jupyterhub_base_config.py +++ b/src/illumidesk/tests/illumidesk/apps/test_jupyterhub_base_config.py @@ -24,14 +24,14 @@ def test_jupyterhub_base_config(setup_jupyterhub_db, setup_jupyterhub_config_bas c.Spawner.image = os.environ.get('DOCKER_END_USER_IMAGE') c.Spawner.cpu_limit = float(os.environ.get('SPAWNER_CPU_LIMIT')) c.Spawner.mem_limit = os.environ.get('SPAWNER_MEM_LIMIT') - c.DockerSpawner.network_name = 
os.environ.get('DOCKER_NETWORK_NAME') + c.KubeSpawner.network_name = os.environ.get('DOCKER_NETWORK_NAME') docker_spawn_command = os.environ.get('DOCKER_SPAWN_CMD') exchange_dir = os.environ.get('EXCHANGE_DIR') notebook_dir = os.environ.get('DOCKER_NOTEBOOK_DIR') assert c.Authenticator.admin_users == {'admin0'} - assert c.DockerSpawner.network_name == 'test-network' + assert c.KubeSpawner.network_name == 'test-network' assert c.JupyterHub.db_url == 'postgresql://foobar:abc123@jupyterhub-db:5432/jupyterhub' assert c.JupyterHub.shutdown_on_logout == True # noqa: E712 diff --git a/src/tests/illumidesk/apps/__init__.py b/src/illumidesk/tests/illumidesk/authenticators/__init__.py similarity index 100% rename from src/tests/illumidesk/apps/__init__.py rename to src/illumidesk/tests/illumidesk/authenticators/__init__.py diff --git a/src/tests/illumidesk/authenticators/test_lti11_authenticator.py b/src/illumidesk/tests/illumidesk/authenticators/test_lti11_authenticator.py similarity index 95% rename from src/tests/illumidesk/authenticators/test_lti11_authenticator.py rename to src/illumidesk/tests/illumidesk/authenticators/test_lti11_authenticator.py index 532c31d9..606f0b5c 100644 --- a/src/tests/illumidesk/authenticators/test_lti11_authenticator.py +++ b/src/illumidesk/tests/illumidesk/authenticators/test_lti11_authenticator.py @@ -138,33 +138,6 @@ async def test_authenticator_uses_lti_utils_normalize_string( assert mock_normalize_string.called -@pytest.mark.asyncio -async def test_authenticator_uses_lti_utils_normalize_string_for_context_label( - make_lti11_success_authentication_request_args, gradesender_controlfile_mock, mock_nbhelper -): - """ - Ensure that we call the normalize string method with the LTI11Authenticator when the course id is - obtained from the context_label argument. 
- """ - with patch.object(LTI11LaunchValidator, 'validate_launch_request', return_value=True): - with patch.object(LTIUtils, 'normalize_string', return_value='foobar') as mock_normalize_string: - authenticator = LTI11Authenticator() - handler = Mock(spec=RequestHandler) - request = HTTPServerRequest( - method='POST', - connection=Mock(), - ) - handler.request = request - - handler.request.arguments = make_lti11_success_authentication_request_args('context_label') - handler.request.get_argument = lambda x, strip=True: make_lti11_success_authentication_request_args( - 'context_label' - )[x][0].decode() - - _ = await authenticator.authenticate(handler, None) - assert mock_normalize_string.called - - @pytest.mark.asyncio @patch('pathlib.Path.mkdir') async def test_authenticator_uses_lti_grades_sender_control_file_with_student_role( @@ -612,7 +585,7 @@ async def test_authenticator_returns_username_from_user_id_with_another_lms( ) result = await authenticator.authenticate(handler, None) expected = { - 'name': '185d6c59731a553009ca', + 'name': '185d6c59731a553009ca9b59c', 'auth_state': { 'course_id': 'intro101', 'lms_user_id': '185d6c59731a553009ca9b59ca3a885100000', diff --git a/src/tests/illumidesk/authenticators/test_lti11_handlers.py b/src/illumidesk/tests/illumidesk/authenticators/test_lti11_handlers.py similarity index 100% rename from src/tests/illumidesk/authenticators/test_lti11_handlers.py rename to src/illumidesk/tests/illumidesk/authenticators/test_lti11_handlers.py diff --git a/src/tests/illumidesk/authenticators/test_lti11_validator.py b/src/illumidesk/tests/illumidesk/authenticators/test_lti11_validator.py similarity index 100% rename from src/tests/illumidesk/authenticators/test_lti11_validator.py rename to src/illumidesk/tests/illumidesk/authenticators/test_lti11_validator.py diff --git a/src/tests/illumidesk/authenticators/test_lti13_authenticator.py b/src/illumidesk/tests/illumidesk/authenticators/test_lti13_authenticator.py similarity index 99% rename 
from src/tests/illumidesk/authenticators/test_lti13_authenticator.py rename to src/illumidesk/tests/illumidesk/authenticators/test_lti13_authenticator.py index a9599b67..43f712ff 100644 --- a/src/tests/illumidesk/authenticators/test_lti13_authenticator.py +++ b/src/illumidesk/tests/illumidesk/authenticators/test_lti13_authenticator.py @@ -82,7 +82,7 @@ async def test_authenticator_invokes_lti_utils_normalize_string( @pytest.mark.asyncio async def test_authenticator_returns_course_id_in_auth_state_with_valid_resource_link_request( - auth_state_dict, + make_auth_state_dict, make_lti13_resource_link_request, build_lti13_jwt_id_token, make_mock_request_handler, @@ -103,7 +103,7 @@ async def test_authenticator_returns_course_id_in_auth_state_with_valid_resource @pytest.mark.asyncio async def test_authenticator_returns_auth_state_with_course_id_normalized( - auth_state_dict, + make_auth_state_dict, make_lti13_resource_link_request, build_lti13_jwt_id_token, make_mock_request_handler, @@ -120,7 +120,7 @@ async def test_authenticator_returns_auth_state_with_course_id_normalized( with patch.object(RequestHandler, 'get_argument', return_value=build_lti13_jwt_id_token(link_request)): with patch.object(LTI13LaunchValidator, 'validate_launch_request', return_value=True): result = await authenticator.authenticate(request_handler, None) - assert result['auth_state']['course_id'] == 'courseid-with_largen' + assert result['auth_state']['course_id'] == 'courseid-with_largename' @pytest.mark.asyncio diff --git a/src/tests/illumidesk/authenticators/test_lti13_handlers.py b/src/illumidesk/tests/illumidesk/authenticators/test_lti13_handlers.py similarity index 100% rename from src/tests/illumidesk/authenticators/test_lti13_handlers.py rename to src/illumidesk/tests/illumidesk/authenticators/test_lti13_handlers.py diff --git a/src/tests/illumidesk/authenticators/test_lti13_validator.py b/src/illumidesk/tests/illumidesk/authenticators/test_lti13_validator.py similarity index 100% rename 
from src/tests/illumidesk/authenticators/test_lti13_validator.py rename to src/illumidesk/tests/illumidesk/authenticators/test_lti13_validator.py diff --git a/src/tests/illumidesk/authenticators/test_setup_course_hook.py b/src/illumidesk/tests/illumidesk/authenticators/test_setup_course_hook.py similarity index 70% rename from src/tests/illumidesk/authenticators/test_setup_course_hook.py rename to src/illumidesk/tests/illumidesk/authenticators/test_setup_course_hook.py index 5e4c0a56..3c081e16 100644 --- a/src/tests/illumidesk/authenticators/test_setup_course_hook.py +++ b/src/illumidesk/tests/illumidesk/authenticators/test_setup_course_hook.py @@ -1,4 +1,3 @@ -import json import os from jupyterhub.auth import Authenticator @@ -19,7 +18,6 @@ from illumidesk.authenticators.authenticator import LTI11Authenticator from illumidesk.authenticators.authenticator import LTI13Authenticator from illumidesk.authenticators.authenticator import setup_course_hook -from illumidesk.authenticators.utils import LTIUtils @pytest.mark.asyncio @@ -40,73 +38,6 @@ async def test_setup_course_hook_is_assigned_to_lti13_authenticator_post_auth_ho assert authenticator.post_auth_hook == setup_course_hook -@pytest.mark.asyncio() -async def test_setup_course_hook_raises_environment_error_with_missing_org( - monkeypatch, make_auth_state_dict, setup_course_hook_environ, make_mock_request_handler -): - """ - Is an environment error raised when the organization name is missing when calling - the setup_course_hook function? 
- """ - monkeypatch.setenv('ORGANIZATION_NAME', '') - local_authenticator = Authenticator(post_auth_hook=setup_course_hook) - local_handler = make_mock_request_handler(RequestHandler, authenticator=local_authenticator) - local_authentication = make_auth_state_dict() - with pytest.raises(EnvironmentError): - await local_authenticator.post_auth_hook(local_authenticator, local_handler, local_authentication) - - -@pytest.mark.asyncio() -async def test_setup_course_hook_calls_normalize_strings( - auth_state_dict, - setup_course_environ, - setup_course_hook_environ, - make_mock_request_handler, - make_http_response, - mock_nbhelper, -): - """ - Does the setup_course_hook return normalized strings for the username and the course_id? - """ - local_authenticator = Authenticator(post_auth_hook=setup_course_hook) - local_handler = make_mock_request_handler(RequestHandler, authenticator=local_authenticator) - local_authentication = auth_state_dict - - with patch.object(LTIUtils, 'normalize_string', return_value='intro101') as mock_normalize_string: - with patch.object(JupyterHubAPI, 'add_student_to_jupyterhub_group', return_value=None): - with patch.object( - AsyncHTTPClient, 'fetch', return_value=make_http_response(handler=local_handler.request) - ): - _ = await setup_course_hook(local_authenticator, local_handler, local_authentication) - assert mock_normalize_string.called - - -@pytest.mark.asyncio() -async def test_setup_course_hook_raises_json_decode_error_without_client_fetch_response( - monkeypatch, - setup_course_environ, - setup_course_hook_environ, - make_auth_state_dict, - make_mock_request_handler, - make_http_response, - mock_nbhelper, -): - """ - Does the setup course hook raise a json decode error if the response form the setup course - microservice is null or empty? 
- """ - local_authenticator = Authenticator(post_auth_hook=setup_course_hook) - local_handler = make_mock_request_handler(RequestHandler, authenticator=local_authenticator) - local_authentication = make_auth_state_dict() - - with patch.object(JupyterHubAPI, 'add_student_to_jupyterhub_group', return_value=None): - with patch.object( - AsyncHTTPClient, 'fetch', return_value=make_http_response(handler=local_handler.request, body=None) - ): - with pytest.raises(json.JSONDecodeError): - await setup_course_hook(local_authenticator, local_handler, local_authentication) - - @pytest.mark.asyncio() async def test_setup_course_hook_calls_add_student_to_jupyterhub_group_when_role_is_learner( setup_course_environ, @@ -339,49 +270,3 @@ async def test_setup_course_hook_calls_announcement_service_when_is_new_setup( await setup_course_hook(local_authenticator, local_handler, local_authentication) assert AnnouncementService.add_announcement.called - - -@pytest.mark.asyncio() -async def test_is_new_course_initiates_rolling_update( - setup_course_environ, - setup_course_hook_environ, - make_auth_state_dict, - make_http_response, - make_mock_request_handler, - mock_nbhelper, -): - """ - If the course is a new setup does it initiate a rolling update? 
- """ - local_authenticator = Authenticator(post_auth_hook=setup_course_hook) - local_handler = make_mock_request_handler(RequestHandler, authenticator=local_authenticator) - local_authentication = make_auth_state_dict() - - response_args = {'handler': local_handler.request, 'body': {'is_new_setup': True}} - with patch.object(JupyterHubAPI, 'add_student_to_jupyterhub_group', return_value=None): - with patch.object( - AsyncHTTPClient, - 'fetch', - side_effect=[ - make_http_response(**response_args), - None, - ], # noqa: E231 - ) as mock_client: - AnnouncementService.add_announcement = AsyncMock(return_value=None) - - await setup_course_hook(local_authenticator, local_handler, local_authentication) - assert mock_client.called - - mock_client.assert_any_call( - 'http://setup-course:8000/rolling-update', - headers={'Content-Type': 'application/json'}, - body='', - method='POST', - ) - - mock_client.assert_any_call( - 'http://setup-course:8000', - headers={'Content-Type': 'application/json'}, - body='{"org": "test-org", "course_id": "intro101", "domain": "127.0.0.1"}', - method='POST', - ) diff --git a/src/tests/illumidesk/authenticators/test_utils.py b/src/illumidesk/tests/illumidesk/authenticators/test_utils.py similarity index 99% rename from src/tests/illumidesk/authenticators/test_utils.py rename to src/illumidesk/tests/illumidesk/authenticators/test_utils.py index 3d0cba7f..9b09e5d3 100644 --- a/src/tests/illumidesk/authenticators/test_utils.py +++ b/src/illumidesk/tests/illumidesk/authenticators/test_utils.py @@ -27,7 +27,7 @@ def test_normalize_string_with_long_name(): utils = LTIUtils() normalized_container_name = utils.normalize_string(container_name) - assert len(normalized_container_name) <= 20 + assert len(normalized_container_name) <= 25 def test_normalize_string_with_special_characters(): diff --git a/src/tests/illumidesk/conftest.py b/src/illumidesk/tests/illumidesk/conftest.py similarity index 99% rename from src/tests/illumidesk/conftest.py rename to 
src/illumidesk/tests/illumidesk/conftest.py index 315a3fa4..d6c91373 100644 --- a/src/tests/illumidesk/conftest.py +++ b/src/illumidesk/tests/illumidesk/conftest.py @@ -72,7 +72,7 @@ def mock_nbhelper(): update_course=Mock(return_value=None), create_database_if_not_exists=Mock(), add_user_to_nbgrader_gradebook=Mock(return_value=None), - create_assignment_in_nbgrader=Mock(return_value=None), + register_assignment=Mock(return_value=None), get_course=Mock( return_value=Course( id='123', lms_lineitems_endpoint='canvas.docker.com/api/lti/courses/1/line_items' @@ -343,8 +343,8 @@ def setup_course_hook_environ(monkeypatch, jupyterhub_api_environ): Set the environment variables used in the setup_course_hook function """ monkeypatch.setenv('ANNOUNCEMENT_SERVICE_PORT', '8889') - monkeypatch.setenv('DOCKER_SETUP_COURSE_SERVICE_NAME', 'setup-course') - monkeypatch.setenv('DOCKER_SETUP_COURSE_PORT', '8000') + monkeypatch.setenv('SETUP_COURSE_SERVICE_NAME', 'grader-setup-service') + monkeypatch.setenv('SETUP_COURSE_PORT', '8000') monkeypatch.setenv('ORGANIZATION_NAME', 'test-org') @@ -539,7 +539,7 @@ def _make_lti11_basic_launch_args( @pytest.fixture(scope='function') def make_lti11_success_authentication_request_args(): def _make_lti11_success_authentication_request_args( - lms_vendor: str = 'canvas', role: str = 'Instructor', + lms_vendor: str = 'canvas', role: str = 'Instructor' ) -> Dict[str, str]: """ Return a valid request arguments make from LMS to our tool (when authentication steps were success) diff --git a/src/tests/illumidesk/authenticators/__init__.py b/src/illumidesk/tests/illumidesk/grades/__init__.py similarity index 100% rename from src/tests/illumidesk/authenticators/__init__.py rename to src/illumidesk/tests/illumidesk/grades/__init__.py diff --git a/src/tests/illumidesk/grades/test_lms_grades_handler.py b/src/illumidesk/tests/illumidesk/grades/test_lms_grades_handler.py similarity index 100% rename from src/tests/illumidesk/grades/test_lms_grades_handler.py 
rename to src/illumidesk/tests/illumidesk/grades/test_lms_grades_handler.py diff --git a/src/tests/illumidesk/grades/test_sender_controlfile.py b/src/illumidesk/tests/illumidesk/grades/test_sender_controlfile.py similarity index 100% rename from src/tests/illumidesk/grades/test_sender_controlfile.py rename to src/illumidesk/tests/illumidesk/grades/test_sender_controlfile.py diff --git a/src/tests/illumidesk/grades/test_senders.py b/src/illumidesk/tests/illumidesk/grades/test_senders.py similarity index 100% rename from src/tests/illumidesk/grades/test_senders.py rename to src/illumidesk/tests/illumidesk/grades/test_senders.py diff --git a/src/tests/illumidesk/grades/__init__.py b/src/illumidesk/tests/illumidesk/lti13/__init__.py similarity index 100% rename from src/tests/illumidesk/grades/__init__.py rename to src/illumidesk/tests/illumidesk/lti13/__init__.py diff --git a/src/tests/illumidesk/lti13/test_auth.py b/src/illumidesk/tests/illumidesk/lti13/test_auth.py similarity index 100% rename from src/tests/illumidesk/lti13/test_auth.py rename to src/illumidesk/tests/illumidesk/lti13/test_auth.py diff --git a/src/tests/illumidesk/lti13/test_lti13_config_handler.py b/src/illumidesk/tests/illumidesk/lti13/test_lti13_config_handler.py similarity index 100% rename from src/tests/illumidesk/lti13/test_lti13_config_handler.py rename to src/illumidesk/tests/illumidesk/lti13/test_lti13_config_handler.py diff --git a/src/tests/illumidesk/lti13/test_lti13_jwks_handler.py b/src/illumidesk/tests/illumidesk/lti13/test_lti13_jwks_handler.py similarity index 100% rename from src/tests/illumidesk/lti13/test_lti13_jwks_handler.py rename to src/illumidesk/tests/illumidesk/lti13/test_lti13_jwks_handler.py diff --git a/src/tests/illumidesk/lti13/__init__.py b/src/illumidesk/tests/illumidesk/spawners/__init__.py similarity index 100% rename from src/tests/illumidesk/lti13/__init__.py rename to src/illumidesk/tests/illumidesk/spawners/__init__.py diff --git 
a/src/tests/illumidesk/setup_course/__init__.py b/src/tests/illumidesk/setup_course/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/tests/illumidesk/setup_course/test_app.py b/src/tests/illumidesk/setup_course/test_app.py deleted file mode 100644 index 2c609fb6..00000000 --- a/src/tests/illumidesk/setup_course/test_app.py +++ /dev/null @@ -1,118 +0,0 @@ -import json - -import pytest - -from unittest.mock import patch, MagicMock - -from illumidesk.setup_course.course import Course - - -@pytest.mark.asyncio -async def test_config_path_returns_empty_dict(test_quart_client): - """ - Does the config endpoint return a non-empty json when app starts? - """ - response = await test_quart_client.get('/config') - assert response.status_code == 200 - data = await response.get_data(raw=False) - data_as_json = json.loads(data) - assert data_as_json['services'] is not None - assert data_as_json['load_groups'] is not None - - -@pytest.mark.asyncio -async def test_post_method_returns_BadRequest_without_data(test_quart_client): - """ - Does the creation endpoint return 400 as BadRequest when data is None? - """ - response = await test_quart_client.post('/') - assert response.status_code == 400 - - -@pytest.mark.asyncio -async def test_post_method_result_contains_is_new_setup_as_bool( - setup_course_environ, test_quart_client, jupyterhub_api_environ -): - """ - Does the POST endpoint return a boolean value to indicate if new setup was occured? 
- """ - - async def return_async_value(): - return True - - with patch.object(Course, 'setup', side_effect=return_async_value): - data = { - 'org': 'my_company', - 'course_id': 'course01', - 'domain': 'example.com', - } - - response = await test_quart_client.post('/', json=data) - resp_data = await response.get_json() - assert 'is_new_setup' in resp_data - assert isinstance(resp_data['is_new_setup'], bool) - - -@pytest.mark.asyncio -async def test_post_method_result_indicates_when_a_new_setup_was_created( - setup_course_environ, test_quart_client, jupyterhub_api_environ -): - """ - Does the creation endpoint return 400 as BadRequest when data is None? - """ - - async def return_async_value(): - return True - - with patch.multiple( - 'illumidesk.setup_course.course.Course', - should_setup=MagicMock(return_value=True), - create_directories=MagicMock(return_value=None), - is_new_setup=True, - add_jupyterhub_grader_group=MagicMock(side_effect=return_async_value), - add_jupyterhub_student_group=MagicMock(side_effect=return_async_value), - run=MagicMock(), - ) as MockCourse: - - data = { - 'org': 'my_company', - 'course_id': 'course01', - 'domain': 'example.com', - } - - response = await test_quart_client.post('/', json=data) - resp_data = await response.get_json() - assert 'is_new_setup' in resp_data - assert isinstance(resp_data['is_new_setup'], bool) - assert resp_data['is_new_setup'] is True - - -@pytest.mark.asyncio -async def test_post_method_creates_new_service_definition_in_config(setup_course_environ, test_quart_client): - """ - Does the new course is returned by the config endpoint? 
- """ - - async def return_async_value(): - return True - - with patch.object(Course, 'setup', side_effect=return_async_value): - - data = { - 'org': 'my_company', - 'course_id': 'course01', - 'domain': 'example.com', - } - # create the course - _ = await test_quart_client.post('/', json=data) - # check the jupyterhub config file - config_response = await test_quart_client.get('/config') - response_data = await config_response.get_data(raw=False) - data_as_json = json.loads(response_data) - - exists = True - for service in data_as_json['services']: - if data['course_id'] in service: - exists = True - - assert exists is True diff --git a/src/tests/illumidesk/setup_course/test_course.py b/src/tests/illumidesk/setup_course/test_course.py deleted file mode 100644 index 84fc06fe..00000000 --- a/src/tests/illumidesk/setup_course/test_course.py +++ /dev/null @@ -1,261 +0,0 @@ -import os -from pathlib import Path - -import pytest - -from unittest.mock import patch - -from docker.errors import NotFound -from illumidesk.apis.nbgrader_service import NbGraderServiceHelper - -from illumidesk.setup_course.course import Course -from illumidesk.setup_course.constants import NBGRADER_HOME_CONFIG_TEMPLATE -from illumidesk.setup_course.constants import NBGRADER_COURSE_CONFIG_TEMPLATE - - -def test_initializer_requires_arguments(): - """ - Do we get a type error if we try to create a course instance without all required initialization - variables? - """ - with pytest.raises(TypeError): - Course() - - -def test_initializer_set_course_id(setup_course_environ): - """ - Does the initializer properly set the course_id property? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.course_id is not None - assert course.course_id == 'example' - - -def test_initializer_set_org(setup_course_environ): - """ - Does the initializer properly set the organization property? 
- """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.org is not None - assert course.org == 'org1' - - -def test_initializer_set_domain(setup_course_environ): - """ - Does the initializer properly set the domain property? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.domain is not None - assert course.domain == 'example.com' - - -def test_grader_name_is_correct(setup_course_environ): - """ - Is the grader_name well formed? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.grader_name is not None - assert course.grader_name == f'grader-{course.course_id}' - - -def test_grader_root_path_is_valid(setup_course_environ): - """ - Is the grader_root well formed? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.grader_root is not None - assert course.grader_root == Path( - os.environ.get('MNT_ROOT'), - course.org, - 'home', - course.grader_name, - ) - - -def test_course_path_is_a_grader_root_subfolder(setup_course_environ): - """ - Is the course path a grader subfolder? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.course_root is not None - assert course.course_root == Path(course.grader_root, course.course_id) - - -def test_new_course_has_a_token(setup_course_environ): - """ - Does the initializer set token property? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.token is not None - - -def test_a_course_contains_service_config_well_formed(setup_course_environ): - """ - Does the get_service_config method return a valid config? 
- """ - course = Course(org='org1', course_id='example', domain='example.com') - service_config = course.get_service_config() - assert type(service_config) == dict - assert 'name' in service_config - assert 'url' in service_config - assert 'admin' in service_config - assert 'api_token' in service_config - - -def test_a_course_contains_service_config_with_correct_values(setup_course_environ): - """ - Does the get_service_config method return a config with valid values? - """ - course = Course(org='org1', course_id='example', domain='example.com') - service_config = course.get_service_config() - assert service_config['name'] == course.course_id - assert service_config['url'] == f'http://{course.grader_name}:8888' - assert service_config['admin'] is True - assert service_config['api_token'] == course.token - - -def test_grader_user_role(setup_course_environ): - """ - Is the grader's user_role set to Grader? - """ - course = Course(org='org1', course_id='example', domain='example.com') - assert course.user_role is not None - assert course.user_role == 'Grader' - - -@patch('docker.DockerClient.containers') -def test_should_setup_method_returns_true_if_container_does_not_exist(mock_docker, setup_course_environ): - """ - Does the should_setup method return True when the container not was found? - """ - course = Course(org='org1', course_id='example', domain='example.com') - - def _container_not_exists(name): - raise NotFound(f'container: {name} not exists') - - mock_docker.get.side_effect = lambda name: _container_not_exists(name) - assert course.should_setup() is True - - -def test_course_exchange_root_directory_is_created(setup_course_environ): - """ - Is the exchange directory created as part of setup? 
- """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.exchange_root.exists() - - -def test_course_grader_root_directory_is_created(setup_course_environ): - """ - Is the exchange directory created as part of setup? - """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.grader_root.exists() - - -def test_course_root_directory_is_created(setup_course_environ): - """ - Is the course directory created as part of setup? - """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.course_root.exists() - - -def test_course_shared_folder_is_not_created_if_env_var_was_not_set(setup_course_environ, monkeypatch): - """ - shared directory is not created when env var is set to false? - """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', '') - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.is_shared_folder_enabled is False - assert not course.grader_shared_folder.exists() - - -def test_course_shared_folder_is_created_if_env_var_was_set(setup_course_environ, monkeypatch): - """ - Is the shared course directory created as part of setup? - """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', 'True') - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.is_shared_folder_enabled is True - assert course.grader_shared_folder.exists() - - -def test_course_shared_folder_is_initialized_as_git_repo(setup_course_environ, monkeypatch): - """ - Is the shared directory initialized with git? 
- """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', 'True') - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.grader_shared_folder.joinpath('.git').exists() - - -def test_course_shared_folder_contains_gitignore_file(setup_course_environ, monkeypatch): - """ - does the shared directory contain gitignore file? - """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', 'True') - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.grader_shared_folder.joinpath('.gitignore').exists() - - -def test_course_jupyter_config_path_is_created(setup_course_environ): - """ - Is the jupyter config directory created as part of setup? - """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.jupyter_config_path.exists() - - -def test_course_nbgrader_home_config_path_is_created(setup_course_environ): - """ - Is the nbgrader directory created as part of setup? - """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - assert course.nbgrader_home_config_path.exists() - - -def test_nbgrader_home_config_path_is_created_with_template(setup_course_environ): - """ - Is the nbgrader directory created as part of setup? 
- """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - with course.nbgrader_home_config_path.open('r') as nbgrader_template: - content = nbgrader_template.read() - assert content == NBGRADER_HOME_CONFIG_TEMPLATE.format( - grader_name=course.grader_name, - course_id=course.course_id, - db_url=NbGraderServiceHelper(course.course_id).db_url, - ) - - -def test_nbgrader_course_config_path_is_created_with_template(setup_course_environ): - """ - Is the grader's home directory nbgrader config created with the setup-course service? - """ - course = Course(org='org1', course_id='example', domain='example.com') - with patch('shutil.chown', autospec=True): - course.create_directories() - with course.nbgrader_course_config_path.open('r') as nbgrader_template: - content = nbgrader_template.read() - assert content == NBGRADER_COURSE_CONFIG_TEMPLATE.format(course_id=course.course_id) diff --git a/src/tests/illumidesk/setup_course/test_utils.py b/src/tests/illumidesk/setup_course/test_utils.py deleted file mode 100644 index 96cdccb5..00000000 --- a/src/tests/illumidesk/setup_course/test_utils.py +++ /dev/null @@ -1,42 +0,0 @@ -import pytest - -from illumidesk.setup_course.utils import SetupUtils - - -def test_setup_utils_properties_after_initialization(setup_utils_environ): - """ - Does the initializer properly set the illumidesk directory property? - """ - setup_utils = SetupUtils() - - assert setup_utils.docker_client is not None - assert setup_utils.jupyterhub_container_name == 'jupyterhub' - assert setup_utils.illumidesk_dir == '/home/foo/illumidesk_deployment' - - -@pytest.mark.asyncio -async def test_create_setup_utils_without_illumidesk_dir_env_var(): - """ - Do we get an environment error when attempting to create a SetupUtils instance without the illumidesk_dir - environment variable set to a proper value? 
- """ - with pytest.raises(EnvironmentError): - SetupUtils() - - -@pytest.mark.asyncio -async def test_create_setup_utils_without_illumidesk_dir_env_var(): - """ - Do we get an environment error when attempting to create a SetupUtils instance without the illumidesk_dir - environment variable set to a proper value? - """ - with pytest.raises(EnvironmentError): - SetupUtils() - - -def test_initializer_setup_utils(setup_utils_environ): - """ - Does the initializer properly set the illumidesk directory property? - """ - setup_utils = SetupUtils() - assert setup_utils.illumidesk_dir == '/home/foo/illumidesk_deployment' diff --git a/src/tests/illumidesk/spawners/__init__.py b/src/tests/illumidesk/spawners/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/tests/illumidesk/spawners/test_hooks.py b/src/tests/illumidesk/spawners/test_hooks.py deleted file mode 100644 index 45b73628..00000000 --- a/src/tests/illumidesk/spawners/test_hooks.py +++ /dev/null @@ -1,64 +0,0 @@ -from illumidesk.spawners.hooks import custom_auth_state_hook -from illumidesk.spawners.spawners import IllumiDeskDockerSpawner - - -def test_ensure_environment_assigned_to_user_role_from_auth_state_in_spawner_environment(auth_state_dict): - """ - Does the user's docker container environment reflect his/her role? - """ - sut = IllumiDeskDockerSpawner() - custom_auth_state_hook(sut, auth_state_dict['auth_state']) - # make sure the hook set the environment variables - assert sut.environment['USER_ROLE'] == 'Learner' - - -def test_auth_state_hook_does_not_add_shared_folder_in_volumes_when_this_feature_is_disabled( - auth_state_dict, monkeypatch -): - """ - Does the auth_state_hook ignore the shared-folder when the SHARED_FOLDER_ENABLED is false or empty? 
- """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', '') - sut = IllumiDeskDockerSpawner() - custom_auth_state_hook(sut, auth_state_dict['auth_state']) - # make sure the hook set the environment variables - assert 'shared' not in sut.volumes - - monkeypatch.setenv('SHARED_FOLDER_ENABLED', 'False') - sut = IllumiDeskDockerSpawner() - custom_auth_state_hook(sut, auth_state_dict['auth_state']) - # make sure the hook set the environment variables - assert len([v for v in sut.volumes if '/shared' in v]) == 0 - - -def test_auth_state_hook_adds_shared_folder_in_volumes_when_the_feat_is_enabled( - auth_state_dict, monkeypatch, tmp_path -): - """ - Does the auth_state_hook add the shared-folder when the SHARED_FOLDER_ENABLED is True - """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', 'true') - monkeypatch.setenv('MNT_ROOT', str(tmp_path)) - monkeypatch.setenv('DOCKER_NOTEBOOK_DIR', '/home/jovyan') - - sut = IllumiDeskDockerSpawner() - custom_auth_state_hook(sut, auth_state_dict['auth_state']) - # make sure the hook set the environment variables - assert len([v for v in sut.volumes if '/shared' in v]) > 0 - - -def test_auth_state_hook_does_not_add_shared_folder_with_instructor(auth_state_dict, monkeypatch, tmp_path): - """ - Does the auth_state_hook ignore the shared folder for instructors? 
- """ - monkeypatch.setenv('SHARED_FOLDER_ENABLED', 'true') - monkeypatch.setenv('MNT_ROOT', str(tmp_path)) - monkeypatch.setenv('DOCKER_NOTEBOOK_DIR', '/home/jovyan') - - sut = IllumiDeskDockerSpawner() - auth_state_dict['auth_state']['user_role'] = 'Instructor' - sut.load_shared_folder_with_instructor = False - - custom_auth_state_hook(sut, auth_state_dict['auth_state']) - # make sure the hook set the environment variables - assert len([v for v in sut.volumes if '/shared' in v]) == 0 diff --git a/src/tests/illumidesk/spawners/test_illumidesk_dockerspawner.py b/src/tests/illumidesk/spawners/test_illumidesk_dockerspawner.py deleted file mode 100644 index 27045bad..00000000 --- a/src/tests/illumidesk/spawners/test_illumidesk_dockerspawner.py +++ /dev/null @@ -1,16 +0,0 @@ -import types - -from dockerspawner.dockerspawner import DockerSpawner - - -def test_dockerspawner_uses_raw_username_in_format_volume_name(): - """ - Does the correctly use the username? - """ - d = DockerSpawner() - # notice we're not using variable for username, - # it helps understanding how volumes are binding - d.user = types.SimpleNamespace(name='dbs__user5') - d.volumes = {'data/{raw_username}': {'bind': '/home/{raw_username}'}} - assert d.volume_binds == {'data/dbs__user5': {'bind': '/home/dbs__user5', 'mode': 'rw'}} - assert d.volume_mount_points == ['/home/dbs__user5']