From 55b7e8d6dcc374275b7bdb872fe4d8e58e92920c Mon Sep 17 00:00:00 2001
From: cal
Date: Thu, 19 Sep 2024 16:20:57 +1200
Subject: [PATCH 01/31] remove supported apps page

---
 overrides/supported_apps.html | 54 -----------------------------------
 1 file changed, 54 deletions(-)
 delete mode 100644 overrides/supported_apps.html

diff --git a/overrides/supported_apps.html b/overrides/supported_apps.html
deleted file mode 100644
index c15aa37..0000000
--- a/overrides/supported_apps.html
+++ /dev/null
@@ -1,54 +0,0 @@
-{% extends "base.html" -%}
-{% block libs %}
- {{ super() }}
-
-
-
-{% endblock -%}
-{% block styles %}
-{{ super() }}
-
-{% endblock -%}
-
-{% block content -%}
- {{ super() }}
-
-
- - -
-
-
-
- {% if applications|length > 1 %}
- {% for app_name in applications %}
- {% set app = applications[app_name] %}
- {% include "partials/app/app_card.html" %}
- {% endfor %}
- {% else %}
-

Whoops, something has gone wrong loading the software list. Please contact support.

- {% endif %} -
-
-
-
-
-

If the application you are looking for is not here you can make software
- installation request or try compiling it
- yourself.

- You can help contribute to this list by tagging software here. - -{% endblock %} From 27acdef5949de253e514d029898ae839e1cebe63 Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 20 Sep 2024 17:07:01 +1200 Subject: [PATCH 02/31] Add pymarkdown extensions to requirements --- requirements.in | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements.in b/requirements.in index f71ba65..306578a 100644 --- a/requirements.in +++ b/requirements.in @@ -12,6 +12,8 @@ mkdocs-git-revision-date-localized-plugin mkdocs-redirects @ git+https://github.com/CallumWalley/mkdocs-redirects.git@map_file mkdocs-awesome-pages-plugin +pymdown-extensions + # checkers neoteroi-mkdocs codespell From 127821e0ad23b82999f303e93d6ece2382e7823b Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 20 Sep 2024 17:17:06 +1200 Subject: [PATCH 03/31] remove pymdown-extensions --- requirements.in | 2 -- 1 file changed, 2 deletions(-) diff --git a/requirements.in b/requirements.in index 306578a..f71ba65 100644 --- a/requirements.in +++ b/requirements.in @@ -12,8 +12,6 @@ mkdocs-git-revision-date-localized-plugin mkdocs-redirects @ git+https://github.com/CallumWalley/mkdocs-redirects.git@map_file mkdocs-awesome-pages-plugin -pymdown-extensions - # checkers neoteroi-mkdocs codespell From eed4c37aa0aa6b223f4d9dcd98119bf0c3b81db4 Mon Sep 17 00:00:00 2001 From: cal Date: Mon, 23 Sep 2024 16:52:54 +1200 Subject: [PATCH 04/31] consolidated and simplified somew stylesheets --- CONTRIBUTING.md => SETUP.md | 0 SPECIFICATION.md | 70 ---------------- docs/assets/css/footer.css | 31 ------- docs/assets/stylesheets/theme.css | 132 ++++++++++++++++++++++++++++++ mkdocs.yml | 6 -- 5 files changed, 132 insertions(+), 107 deletions(-) rename CONTRIBUTING.md => SETUP.md (100%) delete mode 100644 SPECIFICATION.md delete mode 100644 docs/assets/css/footer.css create mode 100644 docs/assets/stylesheets/theme.css diff --git a/CONTRIBUTING.md b/SETUP.md similarity index 100% rename from CONTRIBUTING.md rename to SETUP.md diff --git a/SPECIFICATION.md b/SPECIFICATION.md deleted file mode 100644 index ff850d4..0000000 --- a/SPECIFICATION.md +++ /dev/null @@ -1,70 +0,0 @@ -# Writing Articles - -## Structure - -Public facing articles are found in the `docs` folder. Any markdown files inside will be rendered, any directory will be subcategories. -Pages can be excluded from being shown in the nav by adding them to `mkdocs.yml: not_in_nav`, as in the case of `includes`. - -By default, all categories are a group only (e.g. they have nothing rendered, only children), -However, if the folder contains an `index.md` file, it will be rendered instead. - -## Article Name/Location - -An articles location is determined by its location in the `docs` directory. -Article file can be nested up to two folders deep, and use the title name, in snake_case. - -### Title - -Article title is determined in order of preference, - -- A title defined in the 'title' meta-data. -- A level 1 Markdown header on the first line of the document body. -- The filename of a document. - -## Templates - -Template can be set in article meta. - -- `main` : Used for regular pages (default). -- `application` : Used for 'application' pages, will include software details header (and be linked in supported apps page). -- `supportedApplication`: For supported applications page. -- `home` : Homepage. - -By default, the `main` theme will be used. template of a theme to render Markdown pages. You can use the template meta-data key to define a different template file for that specific page. 
The template file must be available on the path(s) defined in the theme's environment. - -## Meta - -Article metadata is yaml format at the top of the page between two `---` - -### Mkdocs Parameters - -- `template` : which template to use. -- `title` : title, see above. - -### Material Parameters - -- `description` : used for site meta. -- `icon` : page icon. -- `status` : `new`, `deprecated`. - -### Custom Parameters - -- `prereq` : List of prerequisites. Formatted in markdown. Will be rendered inside a admonation. -- `postreq` : List of what next. Formatted in markdown. Will be rendered inside a admonation. - -### Zendesk Imported - -Not used for anything currently. Info imported from Zendesk Page. - -- `created_at`: -- `hidden`: -- `label_names`: [] -- `position`: -- `vote_count`: -- `vote_sum`: -- `zendesk_article_id`: -- `zendesk_section_id`: - -## Formatting - -Check docs/format.md \ No newline at end of file diff --git a/docs/assets/css/footer.css b/docs/assets/css/footer.css deleted file mode 100644 index 14dc199..0000000 --- a/docs/assets/css/footer.css +++ /dev/null @@ -1,31 +0,0 @@ -#new-footer { - font-family: Lato; - font-size: 12px; - font-weight: 400; - background-color: #101010; -} - -#partners { - height: auto; - background-color: #101010; -} - -#partners #logos img { - height: 40px; - margin: 10px; -} - -#partners #logos { - padding: 10px; - text-align: center; -} - -#partners #logos .nesi-footer-logo img { - margin-right: 100px; - height: 60px; -} - -#partners #logos img { - height: 40px; - margin: 10px; -} \ No newline at end of file diff --git a/docs/assets/stylesheets/theme.css b/docs/assets/stylesheets/theme.css new file mode 100644 index 0000000..bae2876 --- /dev/null +++ b/docs/assets/stylesheets/theme.css @@ -0,0 +1,132 @@ +:root{ + --nesi-grey : #414f5c; + --nesi-grey--light: #94a5ad; + --nesi-yellow :#fcce06; + --nesi-purple: rgb(202, 159, 213); + --nesi-orange : rgb(244, 121, 37); + --nesi-blue : #4fbaed; + --nesi-red:#ef315e; + --nesi-green: #cce310; + + [data-md-color-scheme="default"]{ + --md-primary-fg-color: var(--nesi-red); + + --md-accent-fg-color: var(--nesi-orange);; + --md-accent-fg-color--transparent: rgb(244, 121, 37, 0.25); + } + + /* --md-accent-bg-color: rgb(210,227,235); */ + [data-md-color-scheme="slate"] { + --md-primary-fg-color: var(--nesi-red); + --md-accent-fg-color: var(--nesi-orange);; + --md-accent-fg-color--transparent: rgb(244, 121, 37, 0.25); + + .nt-card-image>img { + filter: brightness(0) invert(1); + } + } +} +/* Logo biggification */ +.md-header__button.md-logo img, .md-header__button.md-logo svg { + height: 4rem; + margin: -2rem; +} + +/* Version table stuff */ +.md-tag.md-tag-ver{ + color: var(--md-code-fg-color); +} +.md-tag.md-tag-ver-shown { + outline: var(--md-primary-fg-color) 2px solid; +} + +.md-tag-ver-warn { + text-decoration: line-through; +} +.md-typeset__table { + width: 100%; +} +.md-typeset__table table:not([class]) { + display: table +} +/* convenience class. 
Not sure if it is used */ +.hidden{ + display: none; +} +/* Get support button */ +.md-button-support{ + position: absolute; + margin: -2rem 0 0 1rem; + width: 80%; + text-align: center; + font-size: 0.7rem; +} +/* Don't duplicate header title */ +.md-nav--primary > .md-nav__title { + display: none; +} +/* fix neotiri card colors */ + +/* Fix codeblock formatting change.*/ +.md-typeset .md-code__content { + display: inline; +} + + /* Make button more buttony */ +.md-button--primary { + box-shadow: grey 2px 2px 2px; +} + + +/* prerequisite custom admonition */ +:root { + --md-admonition-icon--prerequisite: url('data:image/svg+xml;charset=utf-8,') +} + +.md-typeset .admonition.prerequisite, +.md-typeset details.prerequisite { + border-color: rgb(170, 170, 60); +} +.md-typeset .prerequisite > .admonition-title, +.md-typeset .prerequisite > summary { + background-color: rgba(170, 170, 60, 0.1); +} +.md-typeset .prerequisite > .admonition-title::before, +.md-typeset .prerequisite > summary::before { + background-color: rgb(170, 170, 60); + -webkit-mask-image: var(--md-admonition-icon--prerequisite); + mask-image: var(--md-admonition-icon--prerequisite); +} +/* Footer */ +#new-footer { + font-family: Lato; + font-size: 12px; + font-weight: 400; + /* MB logo has black border */ + background-color: #101010; +} + +#partners { + height: auto; + background-color: #101010; +} + +#partners #logos img { + height: 40px; + margin: 10px; +} + +#partners #logos { + padding: 10px; + text-align: center; +} + +#partners #logos .nesi-footer-logo img { + margin-right: 100px; + height: 60px; +} + +#partners #logos img { + height: 40px; + margin: 10px; +} diff --git a/mkdocs.yml b/mkdocs.yml index f97e783..a77010c 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -39,7 +39,6 @@ markdown_extensions: - admonition - attr_list - footnotes - - toc: baselevel: 1 permalink: true @@ -52,8 +51,6 @@ markdown_extensions: - pymdownx.tabbed: alternate_style: true - pymdownx.snippets: null - - neoteroi.cards - - neoteroi.timeline extra: analytics: provider: google @@ -75,9 +72,6 @@ plugins: on_error_fail: true verbose: false extra_css: - - assets/stylesheets/neoteroi-mkdocs.css - - assets/stylesheets/footer.css - - assets/stylesheets/custom_admonations.css - assets/stylesheets/theme.css extra_javascript: - assets/javascripts/general.js From 3b89aa7eff4f6d9084e9bb9927a3a0af097815ce Mon Sep 17 00:00:00 2001 From: cal Date: Tue, 24 Sep 2024 10:13:18 +1200 Subject: [PATCH 05/31] add prose and spellcheck config --- .proselint.json | 6 ++++++ .spellcheck.yml | 45 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+) create mode 100644 .proselint.json create mode 100644 .spellcheck.yml diff --git a/.proselint.json b/.proselint.json new file mode 100644 index 0000000..9bbdcc5 --- /dev/null +++ b/.proselint.json @@ -0,0 +1,6 @@ +{ + "checks":{ + "hyperbole.misc": false, + "typography.exclamation": false, + "typography.symbols": false +}} \ No newline at end of file diff --git a/.spellcheck.yml b/.spellcheck.yml new file mode 100644 index 0000000..ae4b852 --- /dev/null +++ b/.spellcheck.yml @@ -0,0 +1,45 @@ +matrix: +- name: Markdown + aspell: + lang: en + dictionary: + wordlists: + - docs/assets/glossary/dictionary.txt + encoding: utf-8 + pipeline: + - pyspelling.filters.url: + - pyspelling.filters.context: + context_visible_first: true + escapes: '\\[\\`~]' + delimiters: + # Ignore multiline content between fences + # ```md + # content + # ``` + - open: '(?s)^(?P *`{3}\s?[a-zA-Z]*)$' + close: '^(`{3})$' + # Ignore 
the content in meta + # --- + # content + # --- + - open: '(?s)^(?P *-{3})$' + close: '^(?P=open)$' + # Ignore content between inline back ticks + # `content` + - open: '(?P`+)' + close: '(?P=open)' + - pyspelling.filters.markdown: + markdown_extensions: + - markdown.extensions.toc + - markdown.extensions.admonition + - markdown.extensions.attr_list + - markdown.extensions.abbr + - markdown.extensions.tables + - pyspelling.filters.html: + comments: false + ignores: + - code + - pre + sources: + - 'docs/**/*.md' + default_encoding: utf-8 \ No newline at end of file From 4cc6884c525e76a73fee762a1982720665226150 Mon Sep 17 00:00:00 2001 From: cal Date: Wed, 25 Sep 2024 16:02:00 +1200 Subject: [PATCH 06/31] Some improvements to problem matcher, added test fail file. --- .vscode/launch.json | 25 ++++++ .vscode/tasks.json | 3 +- checks/run_meta_check.py | 3 +- checks/run_proselint.py | 4 +- checks/run_test_build.py | 13 +++- docs/fail_checks.md | 163 +++++++++++++++++++++++++++++++++++++++ docs/index.md | 159 ++++++++++++++++++++++++++++++++++++++ redirect_map.yml | 2 +- 8 files changed, 367 insertions(+), 5 deletions(-) create mode 100644 .vscode/launch.json create mode 100644 docs/fail_checks.md diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..7dad36a --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug Proselint", + "type": "debugpy", + "request": "launch", + "program": "checks/run_proselint.py", + "args": ["docs/index.md"], + "console": "integratedTerminal", + "justMyCode": false + }, + { + "name": "Debug Testbuild", + "type": "debugpy", + "request": "launch", + "program": "checks/run_test_build.py", + "console": "integratedTerminal", + "justMyCode": false + } + ] +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 47a4a27..ee48d21 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -48,6 +48,7 @@ "column": 4, "endLine": 5, "message": 7, + "loop": true }, "owner": "proselint", "fileLocation": "autoDetect", @@ -94,7 +95,7 @@ "column": 4, "endColumn": 5, "line": 6, - "message": 7 + "message": 7, }, "owner": "test-build", "fileLocation": [ diff --git a/checks/run_meta_check.py b/checks/run_meta_check.py index 0879620..fe57ec0 100755 --- a/checks/run_meta_check.py +++ b/checks/run_meta_check.py @@ -100,6 +100,8 @@ def _run_check(f): for r in f(): print(f"::{r.get('level', 'warning')} file={input_path},title={f.__name__},col={r.get('col', 0)},endColumn={r.get('endColumn', 99)},line={r.get('line', 1)}::{r.get('message', 'something wrong')}") sys.stdout.flush() + time.sleep(0.01) + def _title_from_filename(): @@ -268,4 +270,3 @@ def _count_children(d): # FIXME terrible hack to make VSCode in codespace capture the error messages # see https://github.com/microsoft/vscode/issues/92868 as a tentative explanation - time.sleep(5) diff --git a/checks/run_proselint.py b/checks/run_proselint.py index 014f4ca..787220e 100755 --- a/checks/run_proselint.py +++ b/checks/run_proselint.py @@ -6,6 +6,7 @@ import sys from pathlib import Path +import time import proselint from proselint import config, tools @@ -22,10 +23,11 @@ for file in files: print(f"Running proselint on {file}") content = Path(file).read_text(encoding="utf8") + fails = proselint.tools.lint(content, config=config_custom) for 
notice in proselint.tools.lint(content, config=config_custom): print( f"::{notice[7]} file={file},line={notice[2]+1}," f"col={notice[3]+2},endColumn={notice[2]+notice[6]+1}," f"title={notice[0]}::'{notice[1]}'", - flush=True, ) + time.sleep(0.01) diff --git a/checks/run_test_build.py b/checks/run_test_build.py index 1e60509..0d962f0 100755 --- a/checks/run_test_build.py +++ b/checks/run_test_build.py @@ -6,11 +6,14 @@ import logging import sys import re +import time + """ -This doesnt work and I have no idea why. +This works but is a bit messy """ + def parse_macro(record): # These are not useful messages @@ -29,6 +32,13 @@ def parse_macro(record): record.name = g["title"] record.filename = g["file"] record.msg = g["message"] + + # Does not give correct path to file in question in 'title'. + # Infer from message. + m = re.search(r"'(.*?\.md)'", record.msg) + if m: + record.filename = m.group(1) + return True @@ -42,3 +52,4 @@ def parse_macro(record): log.addHandler(sh) config = load_config(config_file_path="./mkdocs.yml") build.build(config) + time.sleep(5) diff --git a/docs/fail_checks.md b/docs/fail_checks.md new file mode 100644 index 0000000..e2140b7 --- /dev/null +++ b/docs/fail_checks.md @@ -0,0 +1,163 @@ +--- +template: main.html +hide: toc +title: Fail Checks +invalid-entry: something +--- + +# Fail Checks + +Designed to trigger as many fails as possible. + +## Spelling Checks + +speling checkces arre gooder + +## Link checks + +### Missing image + +![fake image](../docs/assets/fake_image.png) + +### Missing link + +[fake link](../docs/fake_page.md) + +### Missing Anchor + +[missing anchor](#missinganchor) (Correct would be `#missing-anchor`) + +typos in cobedlocks or links should be allowed. + +```md +Thats not how you spell cobedlocks at all. +``` + +[tpyos should be picked up here](www.butnothere.com) + +### Naked links + +www.nesi.org.nz + + +## Structure + +#### Skipping a level + +## Only child + + +## Prose checks + +The 1950's were a swell time. +The 50's were a swell time. +Things happened from 1980-1999 and from 240-398 A.D. +March, 2013 was notable in that +In February of 2010, the mayor considered +It's 5 pm somewhere. +It's 12 a.m., time to eat lunch. +It's 11 p.m. at night. +This is a sentence. Two spaces after a period. +centre centre center +$1000 USD +I hit him over the head with a 2 x 4. +A bunch of antelopes walked by the road. +A group of emus attacked me. +She swam by a bunch of oysters. +They hae slain the Earl o' Moray and Lady Mondegreen. +A girl with colitis goes by. +The building is deceptively large. +The project would decimate the fragile wetland wilderness. +Hopefully, one day we will all grow older. +and so I said PLEASE STOP YELLING +so excited! +so excited!! +so excited!!! +so excited!!!! +really?? +and so I said PLEASE STOP YELLING +and so I said PLEASE STOP YELLING okay? +THESE ARE SMALL CAPS at the beginning of a new line. +abbreviatable to NASA +academicly superior. +transhipped +an aider and abbeter +it's adducable +let's look for an acquiror +i wonder what tort-feasor means +Get that off of me before I catch on fire! +There are many a person I have met and worked with who simply deride themselves into taking some action +In the meanwhile, he looked loving at the sheep. +Suddenly, I see. +Get ready: button your seatbelts. +The cream rises to the crop. +The biggest bottleneck is that... +he is very smart +approximately about 5 atm machines +atm machine +we associate together +it's blatantly obvious that this obviously works. 
+a very unique idea + +a more perfect union +the surrounding circumstances +he filled a much-needed gap +To coin a phrase from the movie, +Suddenly, the car came to a stop. +All hell broke loose on the farm. + + +under the weather +He's a chip off the old block +a quantum leap +Our cutting edge decision-making process will make your life crystal clear. +He's a thought leader. +John's cc#: +378282246310005 +the password is tnoernturpn +my password is long and 'long' +my password is amazing +inst. +please be advised that +boughten +this obviously works +brb +rofl +We'll need to reconceptualize this sentence. +enplanement +We'll be taking off momentarily. +Save up to 50% or More! +between you and i +I did it on accident +I feel nauseous +It was a highly-anticipated event. +The English speaking people speak English. +A big ticket item. +A right wing militia. +highfaluting +the statement was inadmissable in court +Nikita Khruschev +I feel innundated with email +Nicknack +He's a shoe-in +Brett Farve and Dwayne Wade are good friends. +The Chronicals of Narnia +I did not pay for the check. Honestly, attention to detail is useful. +I did not pay attention to detail. +I did not pay any attention to detail. +The Manchesterian was a good Brit. +One from Michigan is not a Michiganite but a Michigander. +One from Colorado is not a Coloradoan but a Coloradan. +The lady lawyer handled my case. +John and Mary married. Now they are man and wife. +Chairman Mao was the chairman of the communist party. +Smith, et. al (2006) said +John said that I am "very unique." +John knows that I am very unique. +John knows every unique snowflake is cold. +The preceeding discussion +The 'take-home message' is that +more research is needed +The rest of this article argues that, to a certain degree +in recent years, an increasing number of psychologists have +mutatis mutandis diff --git a/docs/index.md b/docs/index.md index f40b93e..1ce293a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,8 +1,167 @@ --- template: main.html hide: toc +title: Home Page +invalid-entry: something --- # Home Page + Replace me + +Designed to trigger as many fails as possible. + +## Spelling Checks + +speling checkces arre gooder + +## Link checks + + + +### Missing image + +![fake image](../docs/assets/fake_image.png) + +### Missing link + + +[fake link](../docs/fake_page.md) + +### Missing Anchor + +[missing anchor](#missinganchor) (Correct would be `#missing-anchor`) + +### Naked links: + +www.nesi.org.nz + +## Prose checks + +The 1950's were a swell time. +The 50's were a swell time. +Things happened from 1980-1999 and from 240-398 A.D. +March, 2013 was notable in that +In February of 2010, the mayor considered +It's 5 pm somewhere. +It's 12 a.m., time to eat lunch. + + + +It's 11 p.m. at night. +This is a sentence! One space after a period. +This is a sentence. Two spaces after a period. +This is a sentence? Two spaces after a period. +This is a sentence. One space after a period. +This is a sentence. One space after a period. +This is a sentence. One space after a period. +This is a sentence. One space after a period. +centre centre center +organize, organized, organizing, organise +recognize, recognise, recognise, recognise +$1000 USD +I hit him over the head with a 2 x 4. +A bunch of antelopes walked by the road. +A group of emus attacked me. +She swam by a bunch of oysters. +They hae slain the Earl o' Moray and Lady Mondegreen. +A girl with colitis goes by. +The building is deceptively large. 
+The project would decimate the fragile wetland wilderness. +Hopefully, one day we will all grow older. +and so I said PLEASE STOP YELLING +so excited! +so excited!! +so excited!!! +so excited!!!! +really?? +and so I said PLEASE STOP YELLING +and so I said PLEASE STOP YELLING okay? +THESE ARE SMALL CAPS at the beginning of a new line. +abbreviatable to NASA +academicly superior. +transhipped +an aider and abbeter +it's adducable +let's look for an acquiror +i wonder what tort-feasor means +Get that off of me before I catch on fire! +There are many a person I have met and worked with who simply deride themselves into taking some action +In the meanwhile, he looked loving at the sheep. +Suddenly, I see. +Get ready: button your seatbelts. +The cream rises to the crop. +The biggest bottleneck is that... +he is very smart +approximately about 5 atm machines +atm machine +we associate together +it's blatantly obvious that this obviously works. +a very unique idea + +a more perfect union +the surrounding circumstances +he filled a much-needed gap +To coin a phrase from the movie, +Suddenly, the car came to a stop. +All hell broke loose on the farm. + + +under the weather +He's a chip off the old block +a quantum leap +Our cutting edge decision-making process will make your life crystal clear. +He's a thought leader. +John's cc#: +378282246310005 +the password is tnoernturpn +my password is long and 'long' +my password is amazing +inst. +please be advised that +boughten +this obviously works +brb +rofl +We'll need to reconceptualize this sentence. +enplanement +We'll be taking off momentarily. +Save up to 50% or More! +between you and i +I did it on accident +I feel nauseous +It was a highly-anticipated event. +The English speaking people speak English. +A big ticket item. +A right wing militia. +highfaluting +the statement was inadmissable in court +Nikita Khruschev +I feel innundated with email +Nicknack +He's a shoe-in +Brett Farve and Dwayne Wade are good friends. +The Chronicals of Narnia +I did not pay for the check. Honestly, attention to detail is useful. +I did not pay attention to detail. +I did not pay any attention to detail. +(c) 2015 +(R) The Corporation +Use ellipsis not three dots... +The Manchesterian was a good Brit. +One from Michigan is not a Michiganite but a Michigander. +One from Colorado is not a Coloradoan but a Coloradan. +The lady lawyer handled my case. +John and Mary married. Now they are man and wife. +Chairman Mao was the chairman of the communist party. +Smith, et. al (2006) said +John said that I am "very unique." +John knows that I am very unique. +John knows every unique snowflake is cold. 
+The preceeding discussion +The 'take-home message' is that +more research is needed +The rest of this article argues that, to a certain degree +in recent years, an increasing number of psychologists have +mutatis mutandis diff --git a/redirect_map.yml b/redirect_map.yml index 3f1d707..2a26346 100644 --- a/redirect_map.yml +++ b/redirect_map.yml @@ -1 +1 @@ -# oldpage.md: newpage.md +oldpage.md: newpage.md From 328a2a58ab6d9ccbbe8837cd0438e6d625ee1eb0 Mon Sep 17 00:00:00 2001 From: cal Date: Wed, 25 Sep 2024 16:13:54 +1200 Subject: [PATCH 07/31] added extra catch to meta check --- .vscode/launch.json | 11 ++++++++++- checks/run_meta_check.py | 39 +++++++++++++++++++++------------------ 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 7dad36a..7a0f6fb 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -9,7 +9,16 @@ "type": "debugpy", "request": "launch", "program": "checks/run_proselint.py", - "args": ["docs/index.md"], + "args": ["docs/fail_checks.md"], + "console": "integratedTerminal", + "justMyCode": false + }, + { + "name": "Debug Meta Check", + "type": "debugpy", + "request": "launch", + "program": "checks/run_meta_check.py", + "args": ["docs/fail_checks.md"], "console": "integratedTerminal", "justMyCode": false }, diff --git a/checks/run_meta_check.py b/checks/run_meta_check.py index fe57ec0..b532567 100755 --- a/checks/run_meta_check.py +++ b/checks/run_meta_check.py @@ -62,7 +62,7 @@ def main(): _nav_check() with open(input_path, "r") as f: print(f"Checking meta for {f.name}") - try: + if 1: contents = f.read() match = re.match(r"---\n([\s\S]*?)---", contents, re.MULTILINE) if not match: @@ -93,8 +93,8 @@ def main(): _run_check(check) for check in ENDCHECKS: _run_check(check) - except Exception as e: - print(f"::error file={input_path},title=misc,col=0,endColumn=0,line=1 ::{e}") + # except Exception as e: + # print(f"::error file={input_path},title=misc,col=0,endColumn=0,line=1 ::{e}") def _run_check(f): for r in f(): @@ -138,26 +138,29 @@ def _unpack(toc, a): return toc[a[0]] return _unpack(toc[a[0]]["children"], a[1:]) - if in_code_block: - return + try: + if in_code_block: + return - header_match = re.match(r"^(#+)\s*(.*)$", line) + header_match = re.match(r"^(#+)\s*(.*)$", line) - if not header_match: - return - - header_level = len(header_match.group(1)) - header_name = header_match.group(2) + if not header_match: + return + + header_level = len(header_match.group(1)) + header_name = header_match.group(2) - if header_level == 1: - toc = {header_name: {"lineno": lineno, "children": {}}} - toc_parents = [header_name] + if header_level == 1: + toc = {header_name: {"lineno": lineno, "children": {}}} + toc_parents = [header_name] - while header_level < len(toc_parents)+1: - toc_parents.pop(-1) + while header_level < len(toc_parents)+1: + toc_parents.pop(-1) - _unpack(toc, toc_parents)["children"][header_name] = {"level": header_level, "lineno": lineno, "children": {}} - toc_parents += [header_name] + _unpack(toc, toc_parents)["children"][header_name] = {"level": header_level, "lineno": lineno, "children": {}} + toc_parents += [header_name] + except Exception: + print(f"::error file={input_path},title=misc-nav,col=0,endColumn=0,line=1 ::Failed to parse Nav tree. Something is wrong.") def _nav_check(): From 5a0046a6e41f02fe79414afecf644aed2145c25f Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 12:10:58 +1200 Subject: [PATCH 08/31] Remove fetch includes as that only really needed in support docs. 
--- .github/workflows/README.md | 14 ------- .github/workflows/fetch_includes.yml | 55 ---------------------------- 2 files changed, 69 deletions(-) delete mode 100644 .github/workflows/fetch_includes.yml diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 65b4b4e..1f53b57 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -2,20 +2,6 @@ Description of current CI workflow. -## [fetch_includes.yml](fetch_includes.yml) - -Retrieves dynamically generated content from external sources. - -Currently retrieves: -- Software module list from [modules-list](https://github.com/nesi/modules-list). -- Glossary, spellcheck dictionary and snippets from [nesi-wordlist](https://github.com/nesi/nesi-wordlist) - -It then runs [link_apps_pages.py](#link_apps_pagespy). - -All modified files are added to a new branch called `new-assets` and merged into main. - -In theory, all this could be done at deployment, but I wanted to make sure that changes to these remote files didn't break anything. - ## [checks.yml](checks.yml) A series of QA checks run on the documentation. diff --git a/.github/workflows/fetch_includes.yml b/.github/workflows/fetch_includes.yml deleted file mode 100644 index f599300..0000000 --- a/.github/workflows/fetch_includes.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Fetch Remote Assets -on: - workflow_dispatch: - schedule: - - cron: '00 12 * * *' # Should run 1 hour after module list update. -env: - MODULES_LIST_URL: "https://raw.githubusercontent.com/nesi/modules-list/main/module-list.json" - GLOSSARY_URL: "https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/glossary.md" - DICTIONARY_URL: "https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt" - SNIPPETS_URL: "https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/snippets.md" - BRANCH_NAME: "assets-update" -jobs: - fetch-includes: - runs-on: ubuntu-latest - steps: - - name: "Set Up Env" - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - run: | - git config --global user.name "${GITHUB_ACTOR}" - git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com" - git config pull.rebase true - # git checkout ${GITHUB_REF} - # git pull origin ${GITHUB_REF} - git branch -D ${BRANCH_NAME} || true - git checkout -b ${BRANCH_NAME} - mkdir -pv docs/assets/glossary - - name: Module List - run: | - wget -O docs/assets/module-list.json ${MODULES_LIST_URL} - python3 .github/workflows/link_apps_pages.py - git add --all - git commit -m "Updated Module List" || (echo "No Changes";exit 0) - - name: Fetch Wordlist - run: | - wget -O overrides/partials/glossary.html ${GLOSSARY_URL} - wget -O docs/assets/glossary/dictionary.txt ${DICTIONARY_URL} - wget -O docs/assets/glossary/snippets.md ${SNIPPETS_URL} - git add --all - git commit -m "Updated Glossary" || (echo "No Changes";exit 0) - - name: Commit to Branch - id: commit - run: | - body_message="This Merge Request was triggered by an update to $(git diff --name-only ${GITHUB_REF} | tr '\n' ', ')." 
- if [[ $(git log ${GITHUB_REF}..HEAD) ]]; then - git pull origin ${BRANCH_NAME} - git push origin ${BRANCH_NAME} - gh pr create -B ${GITHUB_REF} -H ${BRANCH_NAME} --title "Automatic Asset Update - $(date +'%Y-%m-%d')" --body "${body_message}" -l auto_merge - gh workflow run -R nesi/support-docs -r assets-update -f fileList=" " checks.yml - else - echo "no changes" - fi - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 59202d2937ace052e82d5939ebc67be7c8cfba52 Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 12:44:36 +1200 Subject: [PATCH 09/31] Updated some stuff around dictionary updating --- .gitignore | 1 + checks/README.md | 6 + docs/assets/glossary/Glossary.md | 14 - docs/assets/glossary/README.md | 1 - docs/assets/glossary/dictionary.md | 57 - docs/assets/glossary/dictionary.txt | 2274 ++++++++++++++++++++- docs/assets/glossary/dictionary.yaml | 41 - docs/assets/glossary/snippets.md | 7 - docs/assets/glossary/update_dictionary.sh | 3 + docs/fail_checks.md | 2 +- 10 files changed, 2241 insertions(+), 165 deletions(-) delete mode 100644 docs/assets/glossary/Glossary.md delete mode 100644 docs/assets/glossary/README.md delete mode 100644 docs/assets/glossary/dictionary.md delete mode 100644 docs/assets/glossary/dictionary.yaml delete mode 100644 docs/assets/glossary/snippets.md create mode 100644 docs/assets/glossary/update_dictionary.sh diff --git a/.gitignore b/.gitignore index 5e69864..c19f274 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ public/* production/* **.pyc .venv/* +dictionary.dic diff --git a/checks/README.md b/checks/README.md index 29daf11..40408b5 100644 --- a/checks/README.md +++ b/checks/README.md @@ -16,6 +16,12 @@ Spellcheck pipeline settings can be modified in [.spellcheck.yml](../.spellcheck List of custom words can be found in [dictionary.txt](../docs/assets/glossary/dictionary.txt), however you **should not edit this manually**, see [adding-words-to-dictionary](../docs/CONTRIBUTING.md#adding-words-to-dictionary). +This list is automatically updated daily in the case of docs.nesi, but not the other sites. +You should occasionally run +`wget -O docs/assets/glossary/dictionary.txt https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt` +to keep it up to date. + + ### Limitations Spellchecker does not provide output lineumber / column. diff --git a/docs/assets/glossary/Glossary.md b/docs/assets/glossary/Glossary.md deleted file mode 100644 index 6b5219d..0000000 --- a/docs/assets/glossary/Glossary.md +++ /dev/null @@ -1,14 +0,0 @@ -### NeSI's: - New Zealand national high performance computing platform. -### NeSI: - New Zealand national high performance computing platform. -### HPCs: - Like a regular computer, but larger. Primarily used for heating data centers. -### HPC: - Like a regular computer, but larger. Primarily used for heating data centers. -### supercomputers: - Like a regular computer, but larger. Primarily used for heating data centers. -### supercomputer: - Like a regular computer, but larger. Primarily used for heating data centers. -### MPI: - A standardised message-passing standard designed to function on parallel computing architectures. 
diff --git a/docs/assets/glossary/README.md b/docs/assets/glossary/README.md deleted file mode 100644 index 7bcd952..0000000 --- a/docs/assets/glossary/README.md +++ /dev/null @@ -1 +0,0 @@ -Now fetching from https://github.com/nesi/nesi-wordlist diff --git a/docs/assets/glossary/dictionary.md b/docs/assets/glossary/dictionary.md deleted file mode 100644 index 36f8050..0000000 --- a/docs/assets/glossary/dictionary.md +++ /dev/null @@ -1,57 +0,0 @@ -lockdown -onboarding -rollout -deprioritised -NeSI's -NeSI -NZ -HTTPS -CUDA -HPCs -HPC -supercomputers -supercomputer -GPUs -GPU -MPI -srun -Nano -Māui -Mahuika -ngā -nga -mihi -kia -ora -eScience -nearline -nobackup -precompiled -namespaces -namespace -runtime -sudo -walltimes -walltime -libio -inodes -inode -parallelisation -Globus's -Globus -Jupyter's -Jupyter -Slurm's -Slurm -Glib -Skylake -Lmod's -Lmod -JupyterHub's -JupyterHub -GitHub's -GitHub -OpenMP's -OpenMP -OMP's -OMP diff --git a/docs/assets/glossary/dictionary.txt b/docs/assets/glossary/dictionary.txt index 36f8050..a4f19d9 100644 --- a/docs/assets/glossary/dictionary.txt +++ b/docs/assets/glossary/dictionary.txt @@ -1,57 +1,2243 @@ -lockdown -onboarding -rollout -deprioritised -NeSI's -NeSI -NZ -HTTPS +ABAQUS's +ABAQUS +ABRicate's +ABRicate +ABySS's +ABySS +ACLs +ACL +ACTC's +ACTC +ADAM's +ADAM +AGAT's +AGAT +AGDR +AGE's +AGE +AMOS's +AMOS +AMRFinderPlus's +AMRFinderPlus +ANIcalculator's +ANIcalculator +ANNOVAR's +ANNOVAR +ANSYS's +ANSYS +ANTLR's +ANTLR +ANTs's +ANTs +AOCC's +AOCC +AOCL +AOCL-BLIS's +AOCL-BLIS +AOCL-FFTW's +AOCL-FFTW +AOCL-ScaLAPACK's +AOCL-ScaLAPACK +APR's +APR +APR-util's +APR-util +ARCSI's +ARCSI +ARIBA's +ARIBA +ASAGI's +ASAGI +ATK's +ATK +ATL +AUGUSTUS's +AUGUSTUS +Abseil's +Abseil +AdapterRemoval's +AdapterRemoval +AdaptiveCpp's +AdaptiveCpp +Advisor's +Advisor +AgResearch +Allinea +AlphaFold's +AlphaFold +AlphaFold2DB's +AlphaFold2DB +AlwaysIntelMKL's +AlwaysIntelMKL +Amber's +Amber +Amdahl +Anaconda3's +Anaconda3 +Anaconda3-Geo's +Anaconda3-Geo +Anaconda3-Geo2's +Anaconda3-Geo2 +Anaconda3_Geo2's +Anaconda3_Geo2 +Aotearoa +Apache +Apptainer's +Apptainer +Armadillo's +Armadillo +Arrow's +Arrow +Aspera +Aspera-CLI's +Aspera-CLI +Augustus's +Augustus +AutoDock +AutoDock-GPU's +AutoDock-GPU +AutoDock_Vina's +AutoDock_Vina +Autoconf-archive's +Autoconf-archive +BBMap's +BBMap +BCFtools's +BCFtools +BCL-Convert's +BCL-Convert +BEAST's +BEAST +BEDOPS's +BEDOPS +BEDTools's +BEDTools +BEEF's +BEEF +BGC-Bayesian-genomic-clines's +BGC-Bayesian-genomic-clines +BLAS +BLASR's +BLASR +BLAST's +BLAST +BLASTDB's +BLASTDB +BLAT's +BLAT +BLIS's +BLIS +BOLT-LMM's +BOLT-LMM +BRAKER's +BRAKER +BUSCO's +BUSCO +BWA's +BWA +BamTools's +BamTools +Bandage's +Bandage +Basilisk's +Basilisk +BayPass's +BayPass +BayeScan's +BayeScan +BayesAss's +BayesAss +Bazel's +Bazel +Beagle's +Beagle +BerkeleyGW's +BerkeleyGW +Berkeley_UPC's +Berkeley_UPC +BiG-SCAPE's +BiG-SCAPE +Bifrost's +Bifrost +Bio-DB-BigFile's +Bio-DB-BigFile +Bio-DB-HTS's +Bio-DB-HTS +BioConductor's +BioConductor +BioPP's +BioPP +Bioconductor +Bismark's +Bismark +Bison's +Bison +BlenderPy's +BlenderPy +Boost's +Boost +Bourne +Bowtie's +Bowtie +Bowtie2's +Bowtie2 +Bpipe's +Bpipe +Bracken's +Bracken +BreakDancer's +BreakDancer +BreakSeq2's +BreakSeq2 +Broadwell +Bruijn +CAE +CCL's +CCL +CD-HIT's +CD-HIT +CDO's +CDO +CESM +CFD +CFITSIO's +CFITSIO +CGAL's +CGAL +CLI +CMIP +CMake's +CMake +CNVnator's +CNVnator +CNVpytor's +CNVpytor +COMSOL's +COMSOL +CONCOCT's +CONCOCT +CP2K's +CP2K +CPE +CPMD's +CPMD +CPUs's +CPUs +CPU's +CPU 
+CRAMINO's +CRAMINO +CRI +CRLF +CTPL's +CTPL +CUDA's CUDA -HPCs -HPC -supercomputers -supercomputer +CUnit's +CUnit +CWL +Cannoli's +Cannoli +Canu's +Canu +CapnProto's +CapnProto +Catch2's +Catch2 +CellRanger's +CellRanger +CentOS +Centos +Centrifuge's +Centrifuge +Cereal's +Cereal +CheckM's +CheckM +CheckM2's +CheckM2 +CheckV's +CheckV +Circlator's +Circlator +Circos's +Circos +Clair3's +Clair3 +Clang's +Clang +Clustal-Omega's +Clustal-Omega +ClustalW2's +ClustalW2 +Conda +Corset's +Corset +CoverM's +CoverM +CppUnit's +CppUnit +CubeGUI's +CubeGUI +CubeLib's +CubeLib +CubeWriter's +CubeWriter +Cufflinks's +Cufflinks +Cylc +Cytoscape's +Cytoscape +D-Genies's +D-Genies +DAS_Tool's +DAS_Tool +DB's +DB +DBG2OLC's +DBG2OLC +DBus's +DBus +DCV +DFT-D4's +DFT-D4 +DIAMOND's +DIAMOND +DISCOVARdenovo's +DISCOVARdenovo +DMTCP's +DMTCP +DOI +DRAM's +DRAM +DTNs's +DTNs +DTN's +DTN +DVS +DaliLite's +DaliLite +Dask +DeconSeq's +DeconSeq +DeePMD-kit's +DeePMD-kit +DeepLabCut's +DeepLabCut +Delft3D's +Delft3D +Delft3D_FM's +Delft3D_FM +Delly's +Delly +DendroPy's +DendroPy +Dorado's +Dorado +Doxygen's +Doxygen +Dsuite's +Dsuite +EDTA's +EDTA +EIGENSOFT's +EIGENSOFT +ELPA's +ELPA +EMAN's +EMAN +EMAN2's +EMAN2 +EMBOSS's +EMBOSS +EMIRGE's +EMIRGE +ENMTML's +ENMTML +EOL +ESMF's +ESMF +ETE's +ETE +EasyBuild's +EasyBuild +Easybuild +Eigen's +Eigen +Elmer's +Elmer +Embree's +Embree +EnergyPlus's +EnergyPlus +ErlangOTP's +ErlangOTP +EukRep-EukCC's +EukRep-EukCC +ExaBayes's +ExaBayes +ExaML's +ExaML +ExpansionHunter's +ExpansionHunter +Extrae's +Extrae +FALCON's +FALCON +FASTA +FASTQ +FASTX-Toolkit's +FASTX-Toolkit +FCM's +FCM +FDS's +FDS +FEA +FENSAP +FFTW's +FFTW +FFTW.MPI's +FFTW.MPI +FFmpeg's +FFmpeg +FIGARO's +FIGARO +FLTK's +FLTK +FPM's +FPM +FTE +FTGL's +FTGL +FastANI's +FastANI +FastME's +FastME +FastQC's +FastQC +FastQ_Screen's +FastQ_Screen +FastTree's +FastTree +File-Rename's +File-Rename +Filtlong's +Filtlong +FimTyper's +FimTyper +FlexiBLAS's +FlexiBLAS +Flye's +Flye +FoX's +FoX +Fortran +FragGeneScan's +FragGeneScan +FreeBayes's +FreeBayes +FreeSurfer's +FreeSurfer +FreeXL's +FreeXL +FriBidi's +FriBidi +GATK's +GATK +GCC's +GCC +GCCcore's +GCCcore +GD's +GD +GDAL's +GDAL +GDB +GDC +GEMMA's +GEMMA +GEOS's +GEOS +GFortran +GLM's +GLM +GLPK's +GLPK +GLib's +GLib +GLibmm's +GLibmm +GLnexus's +GLnexus +GMAP-GSNAP's +GMAP-GSNAP +GMP's +GMP +GNU +GOLD's +GOLD +GObject-Introspection's +GObject-Introspection +GPAW's +GPAW +GPFS's +GPFS +GPG +GPGPUs +GPGPU GPUs GPU +GRASS's +GRASS +GRIDSS's +GRIDSS +GROMACS's +GROMACS +GSL's +GSL +GST-plugins-base's +GST-plugins-base +GStreamer's +GStreamer +GTDB-Tk's +GTDB-Tk +GTK +GTK+'s +GTK+ +GTS's +GTS +GUIs +GUI +GUSHR's +GUSHR +Gaussian's +Gaussian +Gbps +Gdk-Pixbuf's +Gdk-Pixbuf +GeneMark +GeneMark-ES's +GeneMark-ES +GenoVi's +GenoVi +GenomeThreader's +GenomeThreader +Gerris's +Gerris +GetOrganelle's +GetOrganelle +GitHub +GitLab +Glib +GlimmerHMM's +GlimmerHMM +Globus +GlobusID +Go's +Go +Graphviz's +Graphviz +GridRunner +Gubbins's +Gubbins +Guile's +Guile +HCS +HDF's +HDF +HDF5's +HDF5 +HGX +HISAT2's +HISAT2 +HMMER's +HMMER +HMMER2's +HMMER2 +HOPS's +HOPS +HPCs +HPC +HTSeq's +HTSeq +HTSlib's +HTSlib +HTTPS +HarfBuzz's +HarfBuzz +HpcGridRunner's +HpcGridRunner +Humann's +Humann +HybPiper's +HybPiper +Hypre's +Hypre +IBM +ICU's +ICU +IDBA's +IDBA +IDBA-UD's +IDBA-UD +IGV's +IGV +IMPUTE's +IMPUTE +IOBUF +IQ-TREE's +IQ-TREE +IQmol's +IQmol +IRkernel's +IRkernel +ISA-L's +ISA-L +IVE's +IVE +Illumina +ImagMagick +ImageMagick's +ImageMagick +Infernal's +Infernal +InfiniBand 
+Inspector's +Inspector +InterProScan's +InterProScan +JAGS's +JAGS +JSON +JUnit's +JUnit +JasPer's +JasPer +Java's +Java +Jellyfish's +Jellyfish +Jinja +JsonCpp's +JsonCpp +Julia's +Julia +Jupyter +JupyterHub's +JupyterHub +JupyterLab's +JupyterLab +KAT's +KAT +KEALib's +KEALib +KMC's +KMC +KNP +Kaiju's +Kaiju +Kent_tools's +Kent_tools +Keras +Keycloak +Kibana +KmerGenie's +KmerGenie +KorfSNAP's +KorfSNAP +Kraken +Kraken2's +Kraken2 +KronaTools's +KronaTools +KyotoCabinet's +KyotoCabinet +LAME's +LAME +LAMMPS's +LAMMPS +LAPACK +LAST's +LAST +LASTZ's +LASTZ +LDC's +LDC +LEfSe's +LEfSe +LF +LINKS's +LINKS +LLVM's +LLVM +LMDB's +LMDB +LSD2's +LSD2 +LTR_retriever's +LTR_retriever +LTS +LUMPY's +LUMPY +LZO's +LZO +Landcare +LegacySystemLibs's +LegacySystemLibs +LibTIFF's +LibTIFF +Libav's +Libav +Libint's +Libint +Liftoff's +Liftoff +LittleCMS's +LittleCMS +Lmod +Loki's +Loki +LongStitch's +LongStitch +M4's +M4 +MAFFT's +MAFFT +MAGMA's +MAGMA +MAKER's +MAKER +MATIO's +MATIO +MATLAB's +MATLAB +MBIE's +MBIE +MCL's +MCL +MCR's +MCR +MD5 +MEGAHIT's +MEGAHIT +METABOLIC's +METABOLIC +METIS's +METIS +MKL +MMseqs2's +MMseqs2 +MOB-suite's +MOB-suite +MODFLOW's +MODFLOW +MPFR's +MPFR MPI -srun -Nano -Māui +MPICH's +MPICH +MSMC's +MSMC +MUMPS's +MUMPS +MUMmer's +MUMmer +MUSCLE's +MUSCLE +MUST's +MUST +MWLR +MaSuRCA's +MaSuRCA +MacOS +Magma's +Magma +Mahuika's Mahuika -ngā -nga -mihi -kia -ora +Mamba's +Mamba +Manaaki +MarkerMiner's +MarkerMiner +Marsden +Mash's +Mash +MashMap's +MashMap +Maven's +Maven +MaxBin's +MaxBin +Meraculous's +Meraculous +Merqury's +Merqury +Mesa's +Mesa +Meson's +Meson +MetaBAT's +MetaBAT +MetaEuk's +MetaEuk +MetaGeneAnnotator's +MetaGeneAnnotator +MetaPhlAn's +MetaPhlAn +MetaPhlAn2's +MetaPhlAn2 +MetaSV's +MetaSV +MetaVelvet's +MetaVelvet +Metashape's +Metashape +Metaxa2's +Metaxa2 +MiMiC's +MiMiC +MiMiC-CommLib's +MiMiC-CommLib +Milan +Miniconda +Miniconda3's +Miniconda3 +Miniforge3's +Miniforge3 +Minimac3's +Minimac3 +Minimac4's +Minimac4 +MitoZ's +MitoZ +MobaXterm +ModDotPlot's +ModDotPlot +ModelTest-NG's +ModelTest-NG +Molcas's +Molcas +Molpro's +Molpro +Mono's +Mono +Monocle3's +Monocle3 +Mothur's +Mothur +MotionCorr's +MotionCorr +MrBayes's +MrBayes +MultiQC's +MultiQC +Māui's +Māui +NAMD's +NAMD +NASM's +NASM +NCARG's +NCARG +NCCL's +NCCL +NCL's +NCL +NCO's +NCO +NECAT's +NECAT +NGS's +NGS +NIWA's +NIWA +NLopt's +NLopt +NONMEM's +NONMEM +NSPR's +NSPR +NSS's +NSS +NVHPC's +NVHPC +NVLink +NWChem's +NWChem +NZ +NZD +NZDT +NZST +Nano +NanoComp's +NanoComp +NanoLyse's +NanoLyse +NanoPlot's +NanoPlot +NanoStat's +NanoStat +Nanopore +NeSI's +NeSI +New Zealander +NewHybrids's +NewHybrids +Newton-X's +Newton-X +NextGenMap's +NextGenMap +Nextflow's +Nextflow +Nim's +Nim +Ninja's +Ninja +Nsight +Nsight-Compute's +Nsight-Compute +Nsight-Systems's +Nsight-Systems +Nvidia +OBITools's +OBITools +OMA's +OMA +OMP +OOM +OPARI2's +OPARI2 +ORCA's +ORCA +ORCID +OSPRay's +OSPRay +OSU-Micro-Benchmarks's +OSU-Micro-Benchmarks +OSX +OTF2's +OTF2 +Octave's +Octave +Octopus's +Octopus +OneDrive +OpenACC +OpenBLAS's +OpenBLAS +OpenBabel's +OpenBabel +OpenCMISS's +OpenCMISS +OpenCV's +OpenCV +OpenFAST's +OpenFAST +OpenFOAM's +OpenFOAM +OpenGL +OpenJPEG's +OpenJPEG +OpenMP +OpenMPI's +OpenMPI +OpenSSH +OpenSSL's +OpenSSL +OpenSees's +OpenSees +OpenSeesPy's +OpenSeesPy +OpenSlide's +OpenSlide +OrfM's +OrfM +OrthoFiller's +OrthoFiller +OrthoFinder's +OrthoFinder +OrthoMCL's +OrthoMCL +Otago +PALEOMIX's +PALEOMIX +PAML's +PAML +PAPI's +PAPI +PBJelly's +PBJelly +PCIe +PCRE's +PCRE +PCRE2's +PCRE2 +PDT's 
+PDT +PEAR's +PEAR +PEST++'s +PEST++ +PETSc's +PETSc +PFFT's +PFFT +PFR +PGI's +PGI +PHASIUS's +PHASIUS +PLINK's +PLINK +PLUMED's +PLUMED +POSIX +PRANK's +PRANK +PROJ's +PROJ +PSpaMM's +PSpaMM +PUMI's +PUMI +PacBio +Pango's +Pango +ParMETIS's +ParMETIS +ParaView's +ParaView +Parallel's +Parallel +ParallelIO's +ParallelIO +Paraver's +Paraver +Peregrine's +Peregrine +Perl's +Perl +PhyML's +PhyML +PhyloPhlAn's +PhyloPhlAn +Pilon's +Pilon +Pletzer +PnetCDF's +PnetCDF +Porechop's +Porechop +Porechop_ABI's +Porechop_ABI +PostgreSQL's +PostgreSQL +PowerShell +Prodigal's +Prodigal +ProtHint's +ProtHint +Proteinortho's +Proteinortho +PuTTY +PyOpenGL's +PyOpenGL +PyQt's +PyQt +PyTorch's +PyTorch +Python's +Python +Python-GPU's +Python-GPU +Python-Geo's +Python-Geo +QChem's +QChem +QIIME2's +QIIME2 +QUAST's +QUAST +QoS +Qt5's +Qt5 +QuantumESPRESSO's +QuantumESPRESSO +QuickTree's +QuickTree +R's +R +R-Geo's +R-Geo +R-bundle-Bioconductor's +R-bundle-Bioconductor +RANGS-GSHHS's +RANGS-GSHHS +RAxML's +RAxML +RAxML-NG's +RAxML-NG +RDP-Classifier's +RDP-Classifier +RE2's +RE2 +REANNZ +RECON's +RECON +REViewer's +REViewer +RFPlasmid's +RFPlasmid +RFdiffusion's +RFdiffusion +RMBlast's +RMBlast +RNAmmer's +RNAmmer +ROCm's +ROCm +ROOT's +ROOT +RSEM's +RSEM +RSGISLib's +RSGISLib +RStudio +RStudio-Server's +RStudio-Server +Racon's +Racon +Ragout's +Ragout +RapidNJ's +RapidNJ +Ratatosk's +Ratatosk +Raven's +Raven +Rcorrector's +Rcorrector +Relion's +Relion +RepeatMasker's +RepeatMasker +RepeatModeler's +RepeatModeler +RepeatScout's +RepeatScout +Riskscape's +Riskscape +Roary's +Roary +Rose's +Rose +Rosetta's +Rosetta +Rstudio's +Rstudio +Ruby's +Ruby +Rust's +Rust +SAGE's +SAGE +SAMtools's +SAMtools +SAS's +SAS +SCOTCH's +SCOTCH +SCP +SCons's +SCons +SDL2's +SDL2 +SEPP's +SEPP +SFTP +SHA-1 +SHA-256 +SHAPEIT4's +SHAPEIT4 +SIF +SIONlib's +SIONlib +SIP's +SIP +SKESA's +SKESA +SMRT-Link's +SMRT-Link +SNVoter's +SNVoter +SNVoter-NanoMethPhase's +SNVoter-NanoMethPhase +SOAPdenovo2's +SOAPdenovo2 +SOCI's +SOCI +SPAdes's +SPAdes +SPIDER's +SPIDER +SQLite's +SQLite +SSAHA2's +SSAHA2 +SSH +SSHFS +SSIF +STAR's +STAR +STAR-Fusion's +STAR-Fusion +SUNDIALS's +SUNDIALS +SURVIVOR's +SURVIVOR +SWIG's +SWIG +Salmon's +Salmon +Sambamba's +Sambamba +ScaLAPAC +ScaLAPACK's +ScaLAPACK +Scalasca's +Scalasca +Schrödinger +Score-P's +Score-P +SeisSol's +SeisSol +SeqAn's +SeqAn +SeqAn3's +SeqAn3 +SeqKit's +SeqKit +SeqMonk's +SeqMonk +SiBELia's +SiBELia +Siesta's +Siesta +Siesta-Optical's +Siesta-Optical +SignalP's +SignalP +Singularity's +Singularity +Skylake +Slurm +Sniffles's +Sniffles +SortMeRNA's +SortMeRNA +SourceTracker's +SourceTracker +Spack's +Spack +Spark's +Spark +Spectrum Scale's +Spectrum Scale +SquashFS +SqueezeMeta's +SqueezeMeta +Stacks's +Stacks +StringTie's +StringTie +Structure's +Structure +Subread's +Subread +Subversion's +Subversion +SuiteSparse's +SuiteSparse +SuperLU's +SuperLU +Supernova's +Supernova +Synda +Szip's +Szip +TEtranscripts's +TEtranscripts +TMHMM's +TMHMM +TOGA's +TOGA +TRES +TSEBRA's +TSEBRA +TURBOMOLE's +TURBOMOLE +TWL-NINJA's +TWL-NINJA +TZ +Tamaki +Tcl's +Tcl +TensorBoard's +TensorBoard +TensorFlow's +TensorFlow +TensorRT's +TensorRT +Theano's +Theano +Tk's +Tk +TopHat's +TopHat +TransDecoder's +TransDecoder +Transrate's +Transrate +TreeMix's +TreeMix +Trilinos's +Trilinos +TrimGalore's +TrimGalore +Trimmomatic's +Trimmomatic +Trinity's +Trinity +Trinotate's +Trinotate +Trycycler's +Trycycler +Tuakiri +TuiView's +TuiView +TurboVNC's +TurboVNC +UCC's +UCC +UCX's +UCX +UDF +UDUNITS's +UDUNITS +UI 
+USEARCH's +USEARCH +UTF +UUID +Unicycler's +Unicycler +UoA +VASP's +VASP +VCF-kit's +VCF-kit +VCFtools's +VCFtools +VEP's +VEP +VIBRANT's +VIBRANT +VMs's +VMs +VM's +VM +VMD's +VMD +VNC +VPN +VSCode +VSEARCH's +VSEARCH +VTK's +VTK +VTune's +VTune +Valgrind's +Valgrind +VarScan's +VarScan +Velvet's +Velvet +VelvetOptimiser's +VelvetOptimiser +ViennaRNA's +ViennaRNA +Vim's +Vim +VirHostMatcher's +VirHostMatcher +VirSorter's +VirSorter +VirtualGL's +VirtualGL +WAAFLE's +WAAFLE +WCRP +WRF +WSL +WhatsHap's +WhatsHap +Whenua +WinSCP +Winnowmap's +Winnowmap +Wise2's +Wise2 +XALT's +XALT +XC +XGKS's +XGKS +XHMM's +XHMM +XMDS2's +XMDS2 +XSD's +XSD +XVFB +XZ's +XZ +Xeon +Xerces +Xerces-C++'s +Xerces-C++ +Xming +YAML +YAXT's +YAXT +Yade's +Yade +Yasm's +Yasm +Z3's +Z3 +Zealander +Zendesk +ZeroMQ's +ZeroMQ +Zip's +Zip +Zonation's +Zonation +abritamr's +abritamr +admonition +analytics +angsd's +angsd +annum +ant's +ant +antiSMASH's +antiSMASH +any2fasta's +any2fasta +argtable's +argtable +aria2's +aria2 +arpack-ng's +arpack-ng +at-spi2-atk's +at-spi2-atk +at-spi2-core's +at-spi2-core +attr's +attr +authenticators's +authenticators +authenticator's +authenticator +azul-zulu's +azul-zulu +backplane +bamUtil's +bamUtil +barrnap's +barrnap +basecaller +bcl2fastq2's +bcl2fastq2 +beagle-lib's +beagle-lib +benchmark +benchmarking +best's +best +binutils's +binutils +bioawk's +bioawk +biomolecules +biomolecule +blasr_libcpp's +blasr_libcpp +branches's +branches +breseq's +breseq +bsddb3's +bsddb3 +bzip2's +bzip2 +c-ares's +c-ares +cURL's +cURL +cairo's +cairo +cdbfasta's +cdbfasta +chainforge's +chainforge +checkpointing +checksums +checksum +chewBBACA's +chewBBACA +chiplets +chopper's +chopper +code-server's +code-server +collapsable +compleasm's +compleasm +conda +config +craype +cromwell's +cromwell +cron +crontab +ctags's +ctags +ctffind's +ctffind +ctrl +cuDNN's +cuDNN +customisable +customisations +cutadapt's +cutadapt +cuteSV's +cuteSV +cwltool's +cwltool +cylc's +cylc +cyvcf2's +cyvcf2 +dadi's +dadi +dammit's +dammit +datamash's +datamash +datasets's +datasets +dataset +deepTools's +deepTools +deprioritised +dereference +devtools's +devtools +double-conversion's +double-conversion +drep's +drep +dtcmp's +dtcmp +duphold's +duphold +duplex-tools's +duplex-tools +eDNA's +eDNA eScience +easi's +easi +ecCodes's +ecCodes +ectyper's +ectyper +edlib's +edlib +eggnog-mapper's +eggnog-mapper +emmtyper's +emmtyper +ensmallen's +ensmallen +entrez-direct's +entrez-direct +exes +exe +executables +executable +exonerate's +exonerate +expat's +expat +fairshare +fastStructure's +fastStructure +fastp's +fastp +fastq-tools's +fastq-tools +fcGENE's +fcGENE +fgbio's +fgbio +filesets +fileset +filesystems +filesystem +findable +fineRADstructure's +fineRADstructure +fineSTRUCTURE's +fineSTRUCTURE +flatbuffers's +flatbuffers +flex's +flex +fmlrc's +fmlrc +fmt's +fmt +fontconfig's +fontconfig +forge's +forge +foss's +foss +freetype's +freetype +funcx-endpoint's +funcx-endpoint +fxtract's +fxtract +g2clib's +g2clib +g2lib's +g2lib +ga4gh's +ga4gh +gcloud's +gcloud +geany's +geany +gemmforge's +gemmforge +genometools's +genometools +genomics +genomic +genotypes +genotype +genotyping +gettext's +gettext +gfastats's +gfastats +gffread's +gffread +giflib's +giflib +gimkl's +gimkl +gimpi's +gimpi +git's +git +glob +globbing +globus-automate-client's +globus-automate-client +globus-compute-endpoint's +globus-compute-endpoint +gmsh's +gmsh +gnuplot's +gnuplot +gompi's +gompi +google-sparsehash's +google-sparsehash +googletest's 
+googletest +gperf's +gperf +grib_api's +grib_api +grive2's +grive2 +gsort's +gsort +h5pp's +h5pp +haplocheck's +haplocheck +hapū +help2man's +help2man +hifiasm's +hifiasm +hooks's +hooks +hunspell's +hunspell +hwloc's +hwloc +hyperthreaded +hyperthreading +hypothesis's +hypothesis +icc's +icc +iccifort's +iccifort +ifort's +ifort +iimpi's +iimpi +imkl's +imkl +imkl-FFTW's +imkl-FFTW +impalajit's +impalajit +impi's +impi +in situ +info's +info +inodes +inode +intel's +intel +intel-compilers's +intel-compilers +intltool's +intltool +iofbf's +iofbf +iompi's +iompi +ipyrad's +ipyrad +ispc's +ispc +iwi +jbigkit's +jbigkit +jcvi's +jcvi +jemalloc's +jemalloc +jq's +jq +json-c's +json-c +jvarkit's +jvarkit +kaitiaki +kaitiakitanga +kalign2's +kalign2 +kallisto's +kallisto +kia +kineto's +kineto +kma's +kma +libFLAME's +libFLAME +libGLU's +libGLU +libKML's +libKML +libStatGen's +libStatGen +libaec's +libaec +libarchive's +libarchive +libcircle's +libcircle +libconfig's +libconfig +libdeflate's +libdeflate +libdrm's +libdrm +libdwarf's +libdwarf +libepoxy's +libepoxy +libevent's +libevent +libffi's +libffi +libgcrypt's +libgcrypt +libgd's +libgd +libgeotiff's +libgeotiff +libgit2's +libgit2 +libglvnd's +libglvnd +libgpg-error's +libgpg-error +libgpuarray's +libgpuarray +libgtextutils's +libgtextutils +libiconv's +libiconv +libio +libjpeg-turbo's +libjpeg-turbo +libmatheval's +libmatheval +libpciaccess's +libpciaccess +libpmi's +libpmi +libpng's +libpng +libreadline's +libreadline +libsigc++'s +libsigc++ +libspatialite's +libspatialite +libtool's +libtool +libunistring's +libunistring +libunwind's +libunwind +libvdwxc's +libvdwxc +libxc's +libxc +libxml++'s +libxml++ +libxml2's +libxml2 +libxslt's +libxslt +libxsmm's +libxsmm +libzstd's +libzstd +lighttpd's +lighttpd +likwid's +likwid +localhost +lockdown +logs's +logs +lp_solve's +lp_solve +lwgrp's +lwgrp +lz4's +lz4 +maf_stream's +maf_stream +magma's +magma +make's +make +manta's +manta +mapDamage's +mapDamage +meRanTK's +meRanTK +medaka's +medaka +megalodon's +megalodon +metaWRAP's +metaWRAP +metagenomics +metagenomic +miRDeep2's +miRDeep2 +microarchitecture +mihi +mimalloc's +mimalloc +miniBUSCO's +miniBUSCO +miniasm's +miniasm +minieigen's +minieigen +minimap2's +minimap2 +miniprot's +miniprot +mlpack's +mlpack +modbam2bed's +modbam2bed +modkit's +modkit +mosdepth's +mosdepth +motif's +motif +mpcci's +mpcci +mpifileutils's +mpifileutils +muParser's +muParser +multithread +multithreaded +multithreading +namespaces +namespace +nano's +nano +nanoQC's +nanoQC +nanofilt's +nanofilt +nanoget's +nanoget +nanomath's +nanomath +nanopolish's +nanopolish +nav +ncbi-vdb's +ncbi-vdb +ncurses's +ncurses +ncview's +ncview +ne's +ne +nearline's nearline +netCDF's +netCDF +netCDF-C++'s +netCDF-C++ +netCDF-C++4's +netCDF-C++4 +netCDF-Fortran's +netCDF-Fortran +nettle's +nettle +networkx's +networkx +nga +ngā nobackup +nodejs's +nodejs +nseg's +nseg +nsync's +nsync +nullarbor's +nullarbor +numactl's +numactl +objects's +objects +onboarding +ont-guppy-gpu's +ont-guppy-gpu +openseespy's +openseespy +ora +padloc's +padloc +pairtools's +pairtools +panaroo's +panaroo +pandoc's +pandoc +parallel's +parallel +parallel-fastq-dump's +parallel-fastq-dump +parallelisation +parallelise +parasail's +parasail +patchelf's +patchelf +pauvre's +pauvre +pggb's +pggb +pgge's +pgge +phylogenetics +phylogenetic +phyx's +phyx +picard's +picard +pigz's +pigz +pixman's +pixman +pkg-config's +pkg-config +pod5's +pod5 +pplacer's +pplacer precompiled -namespaces -namespace +preseq's 
+preseq +procurements +procurement +prodigal's +prodigal +prodigal-gv's +prodigal-gv +profilers's +profilers +profiler's +profiler +programmatically +programmes +programme +prokka's +prokka +proovread's +proovread +protobuf's +protobuf +protobuf-python's +protobuf-python +psmc's +psmc +pstoedit's +pstoedit +pullseq's +pullseq +purge_dups's +purge_dups +purge_haplotigs's +purge_haplotigs +pv's +pv +pyani's +pyani +pycoQC's +pycoQC +pymol-open-source's +pymol-open-source +pyseer's +pyseer +pyspoa's +pyspoa +qcat's +qcat +rDock's +rDock +randfold's +randfold +rasusa's +rasusa +razers3's +razers3 +rclone's +rclone +re2c's +re2c +refs's +refs +repos +repo +reproducibility +requeued +rkcommon's +rkcommon +rnaQUAST's +rnaQUAST +rollout +rstudio's +rstudio +rstudio-server's +rstudio-server runtime +rust-fmlrc's +rust-fmlrc +samblaster's +samblaster +samclip's +samclip +savvy's +savvy +sbt's +sbt +sc-RNA's +sc-RNA +scalable +screen_assembly's +screen_assembly +seqmagick's +seqmagick +seqtk's +seqtk +setgid +shrinkwrap's +shrinkwrap +simuG's +simuG +sismonr's +sismonr +slow5tools's +slow5tools +smafa's +smafa +smoove's +smoove +snakemake's +snakemake +snaphu's +snaphu +snappy's +snappy +snp-sites's +snp-sites +snpEff's +snpEff +somalier's +somalier +spaln's +spaln +spdlog's +spdlog +spglib's +spglib +splat's +splat +spoa's +spoa +sratoolkit's +sratoolkit +srun +srun-wrapper's +srun-wrapper +stderr +stdin +stdout +subdirectorys +subdirectory +sublime's +sublime sudo +supercomputers +supercomputer +supercomputings +supercomputing +swarm's +swarm +swissknife's +swissknife +symlink +tRNAscan-SE's +tRNAscan-SE +tabix's +tabix +tabixpp's +tabixpp +taonga +tbb's +tbb +tbl2asn's +tbl2asn +templating +test's +test +tmpfs +tmux's +tmux +toolchains +toolchain +tooltips's +tooltips +tooltip's +tooltip +transcriptomes +transcriptome +trf's +trf +trimAl's +trimAl +unallocated +unimap's +unimap +unmount +unrar's +unrar +util-linux's +util-linux +validator +vcflib's +vcflib +vectorisation +verkko's +verkko +vg's +vg walltimes walltime -libio -inodes -inode -parallelisation -Globus's -Globus -Jupyter's -Jupyter -Slurm's -Slurm -Glib -Skylake -Lmod's -Lmod -JupyterHub's -JupyterHub -GitHub's -GitHub -OpenMP's -OpenMP -OMP's -OMP +wgsim's +wgsim +wheel's +wheel +whānau +wtdbg's +wtdbg +wxWidgets's +wxWidgets +x264's +x264 +x265's +x265 +xbitmaps's +xbitmaps +xkbcommon's +xkbcommon +xtb's +xtb +yacrd's +yacrd +yajl's +yajl +yak's +yak +yaml-cpp's +yaml-cpp +zlib's +zlib +zstd's +zstd diff --git a/docs/assets/glossary/dictionary.yaml b/docs/assets/glossary/dictionary.yaml deleted file mode 100644 index 1b6db64..0000000 --- a/docs/assets/glossary/dictionary.yaml +++ /dev/null @@ -1,41 +0,0 @@ ---- -# Proper names -Globus: -Jupyter: -Slurm: -Glib: -Skylake: -Lmod: - -# Commands -srun: -Nano: - checkcase: false - -# Acronyms -NeSI: - long: New Zealand eScience Infrastructure -NZ: -HTTPS: -CUDA: - -# Jargon -eScience: -nearline: -nobackup: -precompiled: -namespaces: -runtime: -sudo: -walltime: -libio: - -# Misc -lockdown: -onboarding: -rollout: -deprioritised: - -# Maori -Māui: -Mahuika: diff --git a/docs/assets/glossary/snippets.md b/docs/assets/glossary/snippets.md deleted file mode 100644 index d686857..0000000 --- a/docs/assets/glossary/snippets.md +++ /dev/null @@ -1,7 +0,0 @@ -*[NeSI's]: New Zealand national high performance computing platform. -*[NeSI]: New Zealand national high performance computing platform. -*[HPCs]: Like a regular computer, but larger. Primarily used for heating data centers. 
-*[HPC]: Like a regular computer, but larger. Primarily used for heating data centers. -*[supercomputers]: Like a regular computer, but larger. Primarily used for heating data centers. -*[supercomputer]: Like a regular computer, but larger. Primarily used for heating data centers. -*[MPI]: A standardised message-passing standard designed to function on parallel computing architectures. diff --git a/docs/assets/glossary/update_dictionary.sh b/docs/assets/glossary/update_dictionary.sh new file mode 100644 index 0000000..5f26cb8 --- /dev/null +++ b/docs/assets/glossary/update_dictionary.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +( cd "${PWD/\/docs\/*/}" && wget -O docs/assets/glossary/dictionary.txt https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt ) diff --git a/docs/fail_checks.md b/docs/fail_checks.md index e2140b7..0d6d7a8 100644 --- a/docs/fail_checks.md +++ b/docs/fail_checks.md @@ -2,7 +2,6 @@ template: main.html hide: toc title: Fail Checks -invalid-entry: something --- # Fail Checks @@ -11,6 +10,7 @@ Designed to trigger as many fails as possible. ## Spelling Checks + speling checkces arre gooder ## Link checks From 4a0596decc19c98d02a1897283d9035e2d84bbda Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 12:44:50 +1200 Subject: [PATCH 10/31] Demo deploy should work now --- .github/workflows/demo_deploy.yml | 141 ++++++++++++++++-------------- 1 file changed, 73 insertions(+), 68 deletions(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index e4b77e5..e286ec5 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -1,68 +1,73 @@ -# Requires some setup - - -# name: Deploy PR branches -# on: -# pull_request: -# env: -# GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -# TARGET_REPO: "support-docs-dev" -# TARGET_OWNER: "CallumWalley" -# WORKFLOW_ID: "deploy.yml" -# DEPLOY_URL: "https://callumwalley.github.io/support-docs-dev" -# HEAD: ${{ github.event.pull_request.head.ref }} -# permissions: write-all -# jobs: -# demo-deploy: -# # continue-on-error: true -# name: Trigger test deployments -# runs-on: ubuntu-latest -# steps: -# - name: Trigger Workflow in Another Repository -# run: | -# set -x -# set -o xtrace -# curl -L \ -# -X POST \ -# -H "Accept: application/vnd.github+json" \ -# -H "Authorization: Bearer ${{ secrets.PAT }}" \ -# -H "X-GitHub-Api-Version: 2022-11-28" \ -# https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ -# -d '{"event_type":"deploy","client_payload":{"pr-branches":"${{ github.event.pull_request.head.ref }}", "use-cache":"true"}}' -# - name: Wait for Workflow Action -# run: | -# # Just give a minute or so to deploy -# # sleep 90 -# # curl -L \ -# # -X POST \ -# # -H "Accept: application/vnd.github+json" \ -# # -H "Authorization: Bearer ${{ secrets.PAT }}" \ -# # -H "X-GitHub-Api-Version: 2022-11-28" \ -# # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml -# - name: Checkout repository -# uses: actions/checkout@v4 -# with: -# fetch-depth: 0 -# - name: Post messages open requests -# run: | -# msg="Test deployment available at ${DEPLOY_URL}/${HEAD}" -# changed_pages="$(git diff --name-only origin/main -- '*.md')" -# # Logic for truncating out long sections commented out. -# # maxlines=-5 -# if [ -n "${changed_pages}" ]; then -# msg="${msg}
<br><br>Seems the following pages differ;<br><ul>"
-#             for f in ${changed_pages};do
-#               # maxlines=((maxlines+1))
-#               #if [ ${maxlines} -lt 1 ]; then
-#               g=${f#*/}; h=${g%.*}
-#               msg="${msg}<li>${h##*/}</li>"
-#               #fi
-#             done
-#             # if [ ${maxlines} -gt 0 ];then
-#             #   msg="${msg}<li>... and ${maxlines} more.</li>"
-#             # fi
-#             msg="${msg}</ul>"
-#           fi
-#           msg="${msg}<br>
Test site may take a minute or so to deploy after PR Open or Update." -# (gh pr comment ${HEAD} --edit-last --body "${msg}") || (gh pr comment ${HEAD} --body "${msg}") -# echo "::info title=Deploy successful::${DEPLOY_URL}/${HEAD}" +name: Deploy PR branches +on: + pull_request: +env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TARGET_REPO: "mkdocs-demo-deploy" + TARGET_OWNER: "CallumWalley" + WORKFLOW_ID: "deploy.yml" + DEPLOY_URL: "https://callumwalley.github.io/mkdocs-demo-deploy" + HEAD: ${{ github.event.pull_request.head.ref }} +permissions: write-all +jobs: + demo-deploy: + # continue-on-error: true + name: Trigger test deployments + runs-on: ubuntu-latest + steps: + # - name: Trigger Workflow in Another Repository + # run: | + # set -x + # set -o xtrace + # curl -L \ + # -X POST \ + # -H "Accept: application/vnd.github+json" \ + # -H "Authorization: Bearer ${{ secrets.PAT }}" \ + # -H "X-GitHub-Api-Version: 2022-11-28" \ + # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ + # -d "{\"event_type\":\"deploy\",\"client_payload\":}" + - name: Run 'deploy.yml' Workflow + uses: convictional/trigger-workflow-and-wait@v1.6.1 + with: + owner: ${TARGET_OWNER} + repo: ${TARGET_REPO} + github_token: ${{ secrets.PAT }} + workflow_file_name: deploy.yml + client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' + # - name: Wait for Workflow Action + # run: | + # # Just give a minute or so to deploy + # # sleep 60 + # curl -L \ + # -X POST \ + # -H "Accept: application/vnd.github+json" \ + # -H "Authorization: Bearer ${{ secrets.PAT }}" \ + # -H "X-GitHub-Api-Version: 2022-11-28" \ + # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Post messages open requests + run: | + msg="Test deployment available at ${DEPLOY_URL}/${GITHUB_REPOSITORY}/${HEAD}" + changed_pages="$(git diff --name-only origin/main -- '*.md')" + # Logic for truncating out long sections commented out. + # maxlines=-5 + if [ -n "${changed_pages}" ]; then + msg="${msg}
<br><br>Seems the following pages differ;<br><ul>"
+            for f in ${changed_pages};do
+              # maxlines=((maxlines+1))
+              #if [ ${maxlines} -lt 1 ]; then
+              g=${f#*/}; h=${g%.*}
+              msg="${msg}<li>${h##*/}</li>"
+              #fi
+            done
+            # if [ ${maxlines} -gt 0 ];then
+            #   msg="${msg}<li>... and ${maxlines} more.</li>"
+            # fi
+            msg="${msg}</ul>"
+          fi
+          msg="${msg}<br><br>
See all deployed demo sites" + (gh pr comment ${HEAD} --edit-last --body "${msg}") || (gh pr comment ${HEAD} --body "${msg}") + echo "::info title=Deploy successful::${DEPLOY_URL}/${GITHUB_REPOSITORY}/${HEAD}" From afa5c1d6ce458446ecab7445fc4cc1ed31c69948 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 12:48:36 +1200 Subject: [PATCH 11/31] Test PR --- README.md | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index a6c8017..7e5643f 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,33 @@ # NeSI docs template -Put some infor about the site here. +Replace this readme with info about the site. -## Contents +## First Time Setup -The repository is organised using the following folders: +You will need to have Python **3.10** or later installed on your computer. -- `checks` : scripts intended to be run by CI, -- `docs`: markdown files, structure determines categories and sections[^1], -- `docs/assets`: non-template related files, e.g. images, -- `overrides`: theme overides or extensions for page templates. -- `overrides/partials`: Overrides and extensions for sub components. +Clone this repository and create a Python virtual environment using: -[^1]: A section or category can be replaced by an `index.md` file, this will replace the default nav with a page. +```sh +git clone https://github.com/nesi/nesi-mkdoc-template.git +cd nesi-mkdoc-template +python -m venv .venv +source .venv/bin/activate +pip3 install pip-tools +pip-compile +pip3 install -r requirements.txt +``` + +## Build and deploy + +```sh +source .venv/bin/activate +mkdocs serve -c +``` + +Take note of any warnings or errors. + +A link to the deployment will be printed once served. ## Developer Documentation From 2129b9f03faf960db495da251eec88d689988c27 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 12:58:38 +1200 Subject: [PATCH 12/31] Update demo_deploy.yml --- .github/workflows/demo_deploy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index e286ec5..63e0169 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -33,7 +33,7 @@ jobs: repo: ${TARGET_REPO} github_token: ${{ secrets.PAT }} workflow_file_name: deploy.yml - client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' + client_payload: '{"targets":"${GITHUB_REPOSITORY}:${HEAD}", "use-cache":"true"}' # - name: Wait for Workflow Action # run: | # # Just give a minute or so to deploy From f6ecd0169d7b964c46e298d8554fcc723078b15a Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 12:59:35 +1200 Subject: [PATCH 13/31] trigger --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7e5643f..79abb8f 100644 --- a/README.md +++ b/README.md @@ -50,6 +50,7 @@ Deployments of open pull requests can be viewed at [https://callumwalley.github. We are using the [mkdocs material theme](https://squidfunk.github.io/mkdocs-material/). + ## Analyics Google Analytics can be set up. 
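
As a rough sketch of what "Google Analytics can be set up" usually means for a Material for MkDocs site, the `mkdocs.yml` addition tends to look like the fragment below. The tracking property is a placeholder and the `extra.analytics` keys are assumed from the theme's standard configuration, not taken from these patches:

```yaml
# Hypothetical mkdocs.yml fragment; not part of this patch series.
extra:
  analytics:
    provider: google          # assumes Material for MkDocs' built-in analytics integration
    property: G-XXXXXXXXXX    # placeholder; replace with the site's real measurement ID
```

Whether this project wires analytics through `mkdocs.yml` or an override template is not shown here.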
From 13e81af58fb8d7f96952987ed9774abdd0a7e23b Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:15:20 +1200 Subject: [PATCH 14/31] Update demo_deploy.yml --- .github/workflows/demo_deploy.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index e286ec5..9a19fea 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -1,6 +1,7 @@ name: Deploy PR branches on: pull_request: + workflow_dispatch: env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} TARGET_REPO: "mkdocs-demo-deploy" From ecb47383b0f2408a13d6f11b71291220f251c999 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:23:37 +1200 Subject: [PATCH 15/31] Update demo_deploy.yml --- .github/workflows/demo_deploy.yml | 44 ++++++++++++++++++------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index 9a19fea..7b35fd3 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -16,25 +16,27 @@ jobs: name: Trigger test deployments runs-on: ubuntu-latest steps: - # - name: Trigger Workflow in Another Repository - # run: | - # set -x - # set -o xtrace - # curl -L \ - # -X POST \ - # -H "Accept: application/vnd.github+json" \ - # -H "Authorization: Bearer ${{ secrets.PAT }}" \ - # -H "X-GitHub-Api-Version: 2022-11-28" \ - # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ - # -d "{\"event_type\":\"deploy\",\"client_payload\":}" - - name: Run 'deploy.yml' Workflow - uses: convictional/trigger-workflow-and-wait@v1.6.1 - with: - owner: ${TARGET_OWNER} - repo: ${TARGET_REPO} - github_token: ${{ secrets.PAT }} - workflow_file_name: deploy.yml - client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' + - name: Trigger Workflow in Another Repository + run: | + set -x + set -o xtrace + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.PAT }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ + -d "{\"event_type\":\"deploy\",\"client_payload\":}" + + # This would be better if it worked + # - name: Run 'deploy.yml' Workflow + # uses: convictional/trigger-workflow-and-wait@v1.6.1 + # with: + # owner: ${TARGET_OWNER} + # repo: ${TARGET_REPO} + # github_token: ${{ secrets.PAT }} + # workflow_file_name: deploy.yml + # client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' # - name: Wait for Workflow Action # run: | # # Just give a minute or so to deploy @@ -45,6 +47,10 @@ jobs: # -H "Authorization: Bearer ${{ secrets.PAT }}" \ # -H "X-GitHub-Api-Version: 2022-11-28" \ # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml + - name: Wait for Workflow Action + run: | + # Just give a minute or so to deploy + sleep 60 - name: Checkout repository uses: actions/checkout@v4 with: From 35998314cc631ae8ec17a49a6f5fb8fc98d0886e Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:27:46 +1200 Subject: [PATCH 16/31] Oops no payload --- .github/workflows/demo_deploy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index 670e6be..e30f101 100644 --- 
a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -26,7 +26,7 @@ jobs: -H "Authorization: Bearer ${{ secrets.PAT }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ - -d "{\"event_type\":\"deploy\",\"client_payload\":}" + -d "{\"event_type\":\"deploy\",\"client_payload\":{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}}" # This would be better if it worked # - name: Run 'deploy.yml' Workflow From 3a49693dc45b86abbfe934ada620f894f6615655 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:34:39 +1200 Subject: [PATCH 17/31] Delete TODO.md --- TODO.md | 1 - 1 file changed, 1 deletion(-) delete mode 100644 TODO.md diff --git a/TODO.md b/TODO.md deleted file mode 100644 index f87f5c1..0000000 --- a/TODO.md +++ /dev/null @@ -1 +0,0 @@ -# TODO \ No newline at end of file From aaa3b7cb02d44c5a0931d93a739dd1e019206f9c Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:54:58 +1200 Subject: [PATCH 18/31] Update demo_deploy.yml --- .github/workflows/demo_deploy.yml | 40 +++++++++++++++---------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index e30f101..82e227c 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -12,31 +12,31 @@ env: permissions: write-all jobs: demo-deploy: - # continue-on-error: true + continue-on-error: true name: Trigger test deployments runs-on: ubuntu-latest steps: - - name: Trigger Workflow in Another Repository - run: | - set -x - set -o xtrace - curl -L \ - -X POST \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${{ secrets.PAT }}" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ - -d "{\"event_type\":\"deploy\",\"client_payload\":{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}}" + # - name: Trigger Workflow in Another Repository + # run: | + # set -x + # set -o xtrace + # curl -L \ + # -X POST \ + # -H "Accept: application/vnd.github+json" \ + # -H "Authorization: Bearer ${{ secrets.PAT }}" \ + # -H "X-GitHub-Api-Version: 2022-11-28" \ + # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ + # -d "{\"event_type\":\"deploy\",\"client_payload\":{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}}" # This would be better if it worked - # - name: Run 'deploy.yml' Workflow - # uses: convictional/trigger-workflow-and-wait@v1.6.1 - # with: - # owner: ${TARGET_OWNER} - # repo: ${TARGET_REPO} - # github_token: ${{ secrets.PAT }} - # workflow_file_name: deploy.yml - # client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' + - name: Run 'deploy.yml' Workflow + uses: convictional/trigger-workflow-and-wait@v1.6.1 + with: + owner: ${TARGET_OWNER} + repo: ${TARGET_REPO} + github_token: ${{ secrets.PAT }} + workflow_file_name: deploy.yml + client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' # - name: Wait for Workflow Action # run: | # curl -L \ From 69bb22e36224276ec28a79363cb9138f6140622e Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:55:40 +1200 Subject: [PATCH 19/31] Delete SETUP.md --- SETUP.md | 28 ---------------------------- 1 file 
changed, 28 deletions(-) delete mode 100644 SETUP.md diff --git a/SETUP.md b/SETUP.md deleted file mode 100644 index e5c7ed3..0000000 --- a/SETUP.md +++ /dev/null @@ -1,28 +0,0 @@ -# SETUP - -## First Time Setup - -You will need to have Python **3.10** or later installed on your computer. - -Clone this repository and create a Python virtual environment using: - -```sh -git clone https://github.com/nesi/nesi-mkdoc-template.git -cd nesi-mkdoc-template -python -m venv .venv -source .venv/bin/activate -pip3 install pip-tools -pip-compile -pip3 install -r requirements.txt -``` - -## Build and deploy - -```sh -source .venv/bin/activate -mkdocs serve -c -``` - -Take note of any warnings or errors. - -A link to the deployment will be printed once served. From 06774796a9f96dc7e8e04246a6eab3e46facbd21 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:56:54 +1200 Subject: [PATCH 20/31] Update demo_deploy.yml --- .github/workflows/demo_deploy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index 82e227c..b0b9926 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -36,7 +36,7 @@ jobs: repo: ${TARGET_REPO} github_token: ${{ secrets.PAT }} workflow_file_name: deploy.yml - client_payload: '{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}' + client_payload: '{"targets":"${GITHUB_REPOSITORY}:${HEAD}", "use-cache":"true"}' # - name: Wait for Workflow Action # run: | # curl -L \ From e51da1649150ddcaad1c43a45e7ebd3e6d80d92f Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:57:46 +1200 Subject: [PATCH 21/31] Update README.md --- overrides/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/overrides/README.md b/overrides/README.md index e86545b..d77d88c 100644 --- a/overrides/README.md +++ b/overrides/README.md @@ -20,6 +20,7 @@ This is for customising the [material theme](https://squidfunk.github.io/mkdocs- When possible, it is best to _extend_ a template (using 'super') rather than straight replacing. + Overriding files must mirror the original file structure if you are replacing an existing file. However, you can put your own custom stuff in here. 
Here is a list of the original available files, before override: From 727e9ef5899ffcb5f5376cbf30cd7ad8e29ef2f9 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 16:03:11 +1200 Subject: [PATCH 22/31] Return to working --- .github/workflows/demo_deploy.yml | 54 +++++++++++++++---------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index b0b9926..1a6aca0 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -16,35 +16,35 @@ jobs: name: Trigger test deployments runs-on: ubuntu-latest steps: - # - name: Trigger Workflow in Another Repository - # run: | - # set -x - # set -o xtrace - # curl -L \ - # -X POST \ - # -H "Accept: application/vnd.github+json" \ - # -H "Authorization: Bearer ${{ secrets.PAT }}" \ - # -H "X-GitHub-Api-Version: 2022-11-28" \ - # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ - # -d "{\"event_type\":\"deploy\",\"client_payload\":{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}}" + - name: Trigger Workflow in Another Repository + run: | + set -x + set -o xtrace + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.PAT }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ + -d "{\"event_type\":\"deploy\",\"client_payload\":{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}}" # This would be better if it worked - - name: Run 'deploy.yml' Workflow - uses: convictional/trigger-workflow-and-wait@v1.6.1 - with: - owner: ${TARGET_OWNER} - repo: ${TARGET_REPO} - github_token: ${{ secrets.PAT }} - workflow_file_name: deploy.yml - client_payload: '{"targets":"${GITHUB_REPOSITORY}:${HEAD}", "use-cache":"true"}' - # - name: Wait for Workflow Action - # run: | - # curl -L \ - # -X POST \ - # -H "Accept: application/vnd.github+json" \ - # -H "Authorization: Bearer ${{ secrets.PAT }}" \ - # -H "X-GitHub-Api-Version: 2022-11-28" \ - # https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml + # - name: Run 'deploy.yml' Workflow + # uses: convictional/trigger-workflow-and-wait@v1.6.1 + # with: + # owner: ${TARGET_OWNER} + # repo: ${TARGET_REPO} + # github_token: ${{ secrets.PAT }} + # workflow_file_name: deploy.yml + # client_payload: '{"targets":"${GITHUB_REPOSITORY}:${HEAD}", "use-cache":"true"}' + - name: Wait for Workflow Action + run: | + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.PAT }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml - name: Wait for Workflow Action run: | # Just give a minute or so to deploy From 442c189e42fa4e89bb82f4a09c2ac81642bb2e96 Mon Sep 17 00:00:00 2001 From: Cal <35017184+CallumWalley@users.noreply.github.com> Date: Fri, 27 Sep 2024 16:04:17 +1200 Subject: [PATCH 23/31] Delete overrides/partials/.gitkeep --- overrides/partials/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 overrides/partials/.gitkeep diff --git a/overrides/partials/.gitkeep b/overrides/partials/.gitkeep deleted file mode 100644 index e69de29..0000000 From d78895b4309fd4d2a1c97feced2f1638812b3a29 Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 16:16:01 +1200 Subject: [PATCH 24/31] remove comments --- 
.github/workflows/deploy.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 0bf2980..4dc0803 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -3,11 +3,6 @@ on: push: branches: [main] workflow_dispatch: - # inputs: - # pr-deploy: - # description: Deploy open merge requests in sub-directories. - # type: boolean - # default: true env: PYTHON_VERSION: 3.x GH_TOKEN: ${{ github.token }} @@ -62,10 +57,3 @@ jobs: with: key: mkdocs-${{ hashfiles('.cache/**') }} path: .cache - # - name: Post messages to PRs - # run: | - # for pr in ${{ steps.dev-deps.outputs.branches }}; do - # msg="Test deployment available at https://docs.nesi.org.nz/${pr}" - # (gh pr comment ${pr} --edit-last --body "${msg}") || (gh pr comment ${pr} --body "${msg}") - # done - From 984a94275a5ca2a482db1d30b1018a3f916c6666 Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 17:00:20 +1200 Subject: [PATCH 25/31] Fix commented out catch block --- checks/run_meta_check.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/checks/run_meta_check.py b/checks/run_meta_check.py index b532567..e23151b 100755 --- a/checks/run_meta_check.py +++ b/checks/run_meta_check.py @@ -62,7 +62,7 @@ def main(): _nav_check() with open(input_path, "r") as f: print(f"Checking meta for {f.name}") - if 1: + try: contents = f.read() match = re.match(r"---\n([\s\S]*?)---", contents, re.MULTILINE) if not match: @@ -93,8 +93,8 @@ def main(): _run_check(check) for check in ENDCHECKS: _run_check(check) - # except Exception as e: - # print(f"::error file={input_path},title=misc,col=0,endColumn=0,line=1 ::{e}") + except Exception as e: + print(f"::error file={input_path},title=misc,col=0,endColumn=0,line=1 ::{e}") def _run_check(f): for r in f(): From 4dd58ab1f43b835de1bf9933079d33be97321f7d Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 17:02:41 +1200 Subject: [PATCH 26/31] Update from upstream again --- .github/workflows/demo_deploy.yml | 78 +++++++++++++++++++++++ .vscode/tasks.json | 3 +- checks/README.md | 6 ++ checks/run_meta_check.py | 36 ++++++----- checks/run_proselint.py | 4 +- checks/run_test_build.py | 13 +++- docs/assets/glossary/dictionary.txt | 2 + docs/assets/glossary/update_dictionary.sh | 4 +- docs/assets/stylesheets/footer copy.css | 31 --------- docs/assets/stylesheets/theme.css | 41 ++++++++++++ overrides/README.md | 1 + overrides/partials/.gitkeep | 0 12 files changed, 166 insertions(+), 53 deletions(-) create mode 100644 .github/workflows/demo_deploy.yml delete mode 100644 docs/assets/stylesheets/footer copy.css delete mode 100644 overrides/partials/.gitkeep diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml new file mode 100644 index 0000000..1a6aca0 --- /dev/null +++ b/.github/workflows/demo_deploy.yml @@ -0,0 +1,78 @@ +name: Deploy PR branches +on: + pull_request: + workflow_dispatch: +env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TARGET_REPO: "mkdocs-demo-deploy" + TARGET_OWNER: "CallumWalley" + WORKFLOW_ID: "deploy.yml" + DEPLOY_URL: "https://callumwalley.github.io/mkdocs-demo-deploy" + HEAD: ${{ github.event.pull_request.head.ref }} +permissions: write-all +jobs: + demo-deploy: + continue-on-error: true + name: Trigger test deployments + runs-on: ubuntu-latest + steps: + - name: Trigger Workflow in Another Repository + run: | + set -x + set -o xtrace + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ 
secrets.PAT }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/dispatches \ + -d "{\"event_type\":\"deploy\",\"client_payload\":{\"targets\":\"${GITHUB_REPOSITORY}:${HEAD}\", \"use-cache\":\"true\"}}" + + # This would be better if it worked + # - name: Run 'deploy.yml' Workflow + # uses: convictional/trigger-workflow-and-wait@v1.6.1 + # with: + # owner: ${TARGET_OWNER} + # repo: ${TARGET_REPO} + # github_token: ${{ secrets.PAT }} + # workflow_file_name: deploy.yml + # client_payload: '{"targets":"${GITHUB_REPOSITORY}:${HEAD}", "use-cache":"true"}' + - name: Wait for Workflow Action + run: | + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.PAT }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml + - name: Wait for Workflow Action + run: | + # Just give a minute or so to deploy + sleep 60 + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Post messages open requests + run: | + msg="Test deployment available at ${DEPLOY_URL}/${GITHUB_REPOSITORY}/${HEAD}" + changed_pages="$(git diff --name-only origin/main -- '*.md')" + # Logic for truncating out long sections commented out. + # maxlines=-5 + if [ -n "${changed_pages}" ]; then + msg="${msg}
<br><br>Seems the following pages differ;<br><ul>"
+            for f in ${changed_pages};do
+              # maxlines=((maxlines+1))
+              #if [ ${maxlines} -lt 1 ]; then
+              g=${f#*/}; h=${g%.*}
+              msg="${msg}<li>${h##*/}</li>"
+              #fi
+            done
+            # if [ ${maxlines} -gt 0 ];then
+            #   msg="${msg}<li>... and ${maxlines} more.</li>"
+            # fi
+            msg="${msg}</ul>"
+          fi
+          msg="${msg}<br><br>
See all deployed demo sites" + (gh pr comment ${HEAD} --edit-last --body "${msg}") || (gh pr comment ${HEAD} --body "${msg}") + echo "::info title=Deploy successful::${DEPLOY_URL}/${GITHUB_REPOSITORY}/${HEAD}" diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 47a4a27..ee48d21 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -48,6 +48,7 @@ "column": 4, "endLine": 5, "message": 7, + "loop": true }, "owner": "proselint", "fileLocation": "autoDetect", @@ -94,7 +95,7 @@ "column": 4, "endColumn": 5, "line": 6, - "message": 7 + "message": 7, }, "owner": "test-build", "fileLocation": [ diff --git a/checks/README.md b/checks/README.md index 29daf11..40408b5 100644 --- a/checks/README.md +++ b/checks/README.md @@ -16,6 +16,12 @@ Spellcheck pipeline settings can be modified in [.spellcheck.yml](../.spellcheck List of custom words can be found in [dictionary.txt](../docs/assets/glossary/dictionary.txt), however you **should not edit this manually**, see [adding-words-to-dictionary](../docs/CONTRIBUTING.md#adding-words-to-dictionary). +This list is automatically updated daily in the case of docs.nesi, but not the other sites. +You should occasionally run +`wget -O docs/assets/glossary/dictionary.txt https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt` +to keep it up to date. + + ### Limitations Spellchecker does not provide output lineumber / column. diff --git a/checks/run_meta_check.py b/checks/run_meta_check.py index 0879620..e23151b 100755 --- a/checks/run_meta_check.py +++ b/checks/run_meta_check.py @@ -100,6 +100,8 @@ def _run_check(f): for r in f(): print(f"::{r.get('level', 'warning')} file={input_path},title={f.__name__},col={r.get('col', 0)},endColumn={r.get('endColumn', 99)},line={r.get('line', 1)}::{r.get('message', 'something wrong')}") sys.stdout.flush() + time.sleep(0.01) + def _title_from_filename(): @@ -136,26 +138,29 @@ def _unpack(toc, a): return toc[a[0]] return _unpack(toc[a[0]]["children"], a[1:]) - if in_code_block: - return + try: + if in_code_block: + return - header_match = re.match(r"^(#+)\s*(.*)$", line) + header_match = re.match(r"^(#+)\s*(.*)$", line) - if not header_match: - return - - header_level = len(header_match.group(1)) - header_name = header_match.group(2) + if not header_match: + return + + header_level = len(header_match.group(1)) + header_name = header_match.group(2) - if header_level == 1: - toc = {header_name: {"lineno": lineno, "children": {}}} - toc_parents = [header_name] + if header_level == 1: + toc = {header_name: {"lineno": lineno, "children": {}}} + toc_parents = [header_name] - while header_level < len(toc_parents)+1: - toc_parents.pop(-1) + while header_level < len(toc_parents)+1: + toc_parents.pop(-1) - _unpack(toc, toc_parents)["children"][header_name] = {"level": header_level, "lineno": lineno, "children": {}} - toc_parents += [header_name] + _unpack(toc, toc_parents)["children"][header_name] = {"level": header_level, "lineno": lineno, "children": {}} + toc_parents += [header_name] + except Exception: + print(f"::error file={input_path},title=misc-nav,col=0,endColumn=0,line=1 ::Failed to parse Nav tree. 
Something is wrong.") def _nav_check(): @@ -268,4 +273,3 @@ def _count_children(d): # FIXME terrible hack to make VSCode in codespace capture the error messages # see https://github.com/microsoft/vscode/issues/92868 as a tentative explanation - time.sleep(5) diff --git a/checks/run_proselint.py b/checks/run_proselint.py index 014f4ca..787220e 100755 --- a/checks/run_proselint.py +++ b/checks/run_proselint.py @@ -6,6 +6,7 @@ import sys from pathlib import Path +import time import proselint from proselint import config, tools @@ -22,10 +23,11 @@ for file in files: print(f"Running proselint on {file}") content = Path(file).read_text(encoding="utf8") + fails = proselint.tools.lint(content, config=config_custom) for notice in proselint.tools.lint(content, config=config_custom): print( f"::{notice[7]} file={file},line={notice[2]+1}," f"col={notice[3]+2},endColumn={notice[2]+notice[6]+1}," f"title={notice[0]}::'{notice[1]}'", - flush=True, ) + time.sleep(0.01) diff --git a/checks/run_test_build.py b/checks/run_test_build.py index 1e60509..0d962f0 100755 --- a/checks/run_test_build.py +++ b/checks/run_test_build.py @@ -6,11 +6,14 @@ import logging import sys import re +import time + """ -This doesnt work and I have no idea why. +This works but is a bit messy """ + def parse_macro(record): # These are not useful messages @@ -29,6 +32,13 @@ def parse_macro(record): record.name = g["title"] record.filename = g["file"] record.msg = g["message"] + + # Does not give correct path to file in question in 'title'. + # Infer from message. + m = re.search(r"'(.*?\.md)'", record.msg) + if m: + record.filename = m.group(1) + return True @@ -42,3 +52,4 @@ def parse_macro(record): log.addHandler(sh) config = load_config(config_file_path="./mkdocs.yml") build.build(config) + time.sleep(5) diff --git a/docs/assets/glossary/dictionary.txt b/docs/assets/glossary/dictionary.txt index 19c9590..a4f19d9 100644 --- a/docs/assets/glossary/dictionary.txt +++ b/docs/assets/glossary/dictionary.txt @@ -1750,6 +1750,7 @@ json-c's json-c jvarkit's jvarkit +kaitiaki kaitiakitanga kalign2's kalign2 @@ -2168,6 +2169,7 @@ tabix's tabix tabixpp's tabixpp +taonga tbb's tbb tbl2asn's diff --git a/docs/assets/glossary/update_dictionary.sh b/docs/assets/glossary/update_dictionary.sh index d83bf72..5f26cb8 100644 --- a/docs/assets/glossary/update_dictionary.sh +++ b/docs/assets/glossary/update_dictionary.sh @@ -1,5 +1,3 @@ #!/bin/bash -# Updates dictionary for spellcheck - -wget -O docs/assets/glossary/dictionary.txt https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt +( cd "${PWD/\/docs\/*/}" && wget -O docs/assets/glossary/dictionary.txt https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt ) diff --git a/docs/assets/stylesheets/footer copy.css b/docs/assets/stylesheets/footer copy.css deleted file mode 100644 index 14dc199..0000000 --- a/docs/assets/stylesheets/footer copy.css +++ /dev/null @@ -1,31 +0,0 @@ -#new-footer { - font-family: Lato; - font-size: 12px; - font-weight: 400; - background-color: #101010; -} - -#partners { - height: auto; - background-color: #101010; -} - -#partners #logos img { - height: 40px; - margin: 10px; -} - -#partners #logos { - padding: 10px; - text-align: center; -} - -#partners #logos .nesi-footer-logo img { - margin-right: 100px; - height: 60px; -} - -#partners #logos img { - height: 40px; - margin: 10px; -} \ No newline at end of file diff --git a/docs/assets/stylesheets/theme.css b/docs/assets/stylesheets/theme.css index f8f2e42..483b1e3 100644 
--- a/docs/assets/stylesheets/theme.css +++ b/docs/assets/stylesheets/theme.css @@ -14,21 +14,62 @@ --md-accent-bg-color: rgb(210,227,235); --md-accent-bg-color--light: rgb(210,227,235,0.5); } +/* :root{ + --nesi-grey : #414f5c; + --nesi-grey--light: #94a5ad; + --nesi-yellow :#fcce06; + --nesi-purple: rgb(202, 159, 213); + --nesi-orange : rgb(244, 121, 37); + --nesi-blue : #4fbaed; + --nesi-red:#ef315e; + --nesi-green: #cce310; + + [data-md-color-scheme="default"]{ + --md-primary-fg-color: var(--nesi-red); + + --md-accent-fg-color: var(--nesi-orange);; + --md-accent-fg-color--transparent: rgb(244, 121, 37, 0.25); + } + + --md-accent-bg-color: rgb(210,227,235); + [data-md-color-scheme="slate"] { + --md-primary-fg-color: var(--nesi-red); + --md-accent-fg-color: var(--nesi-orange);; + --md-accent-fg-color--transparent: rgb(244, 121, 37, 0.25); + + .nt-card-image>img { + filter: brightness(0) invert(1); + } + } +} +// Logo biggification +/* .md-header__button.md-logo img, .md-header__button.md-logo svg { + height: 4rem; + margin: -2rem; +} */ + +/* Version table stuff */ .md-tag.md-tag-ver{ color: var(--md-code-fg-color); } .md-tag.md-tag-ver-shown { outline: var(--md-primary-fg-color) 2px solid; } + +.md-tag-ver-warn { + text-decoration: line-through; +} .md-typeset__table { width: 100%; } .md-typeset__table table:not([class]) { display: table } +/* convenience class. Not sure if it is used */ .hidden{ display: none; } +/* Get support button */ .md-button-support{ position: absolute; margin: -2rem 0 0 1rem; diff --git a/overrides/README.md b/overrides/README.md index e86545b..d77d88c 100644 --- a/overrides/README.md +++ b/overrides/README.md @@ -20,6 +20,7 @@ This is for customising the [material theme](https://squidfunk.github.io/mkdocs- When possible, it is best to _extend_ a template (using 'super') rather than straight replacing. + Overriding files must mirror the original file structure if you are replacing an existing file. However, you can put your own custom stuff in here. 
Here is a list of the original available files, before override: diff --git a/overrides/partials/.gitkeep b/overrides/partials/.gitkeep deleted file mode 100644 index e69de29..0000000 From b0452b45f7730f84379212fcd0e9c8c5a1c7043b Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 17:05:16 +1200 Subject: [PATCH 27/31] Uneeded change --- .vscode/tasks.json | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ee48d21..25b10fc 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -47,8 +47,7 @@ "line": 3, "column": 4, "endLine": 5, - "message": 7, - "loop": true + "message": 7 }, "owner": "proselint", "fileLocation": "autoDetect", @@ -95,7 +94,7 @@ "column": 4, "endColumn": 5, "line": 6, - "message": 7, + "message": 7 }, "owner": "test-build", "fileLocation": [ From 181e7bb5f5a499cb35eae89cd16c2371184873ce Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 17:07:20 +1200 Subject: [PATCH 28/31] remove debug line --- checks/run_proselint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/run_proselint.py b/checks/run_proselint.py index 787220e..d6edd96 100755 --- a/checks/run_proselint.py +++ b/checks/run_proselint.py @@ -23,11 +23,11 @@ for file in files: print(f"Running proselint on {file}") content = Path(file).read_text(encoding="utf8") - fails = proselint.tools.lint(content, config=config_custom) for notice in proselint.tools.lint(content, config=config_custom): print( f"::{notice[7]} file={file},line={notice[2]+1}," f"col={notice[3]+2},endColumn={notice[2]+notice[6]+1}," f"title={notice[0]}::'{notice[1]}'", + flush=True ) time.sleep(0.01) From 10b5a0659eb1898fddfb21f5d36afff2c88ed568 Mon Sep 17 00:00:00 2001 From: cal Date: Fri, 27 Sep 2024 17:09:56 +1200 Subject: [PATCH 29/31] uneeded change --- overrides/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/overrides/README.md b/overrides/README.md index d77d88c..e86545b 100644 --- a/overrides/README.md +++ b/overrides/README.md @@ -20,7 +20,6 @@ This is for customising the [material theme](https://squidfunk.github.io/mkdocs- When possible, it is best to _extend_ a template (using 'super') rather than straight replacing. - Overriding files must mirror the original file structure if you are replacing an existing file. However, you can put your own custom stuff in here. Here is a list of the original available files, before override: From 3e1362025d3f491582fd6ab4619eb77e6211f28e Mon Sep 17 00:00:00 2001 From: cal Date: Mon, 17 Feb 2025 11:56:43 +1300 Subject: [PATCH 30/31] Updated checks and CI --- .github/workflows/README.md | 30 ++++- .github/workflows/auto_merge.yml | 4 +- .github/workflows/checks.yml | 34 ++++- .github/workflows/demo_deploy.yml | 10 +- .github/workflows/deploy.yml | 7 +- checks/README.md | 6 - checks/fail_checks.md | 70 +++++++++++ checks/run_meta_check.py | 79 ++++++------ checks/run_proselint.py | 8 +- checks/run_pyspelling.py | 6 + checks/run_slurm_lint.py | 203 ++++++++++++++++++++++++++++++ checks/run_test_build.py | 19 ++- 12 files changed, 406 insertions(+), 70 deletions(-) create mode 100644 checks/fail_checks.md create mode 100755 checks/run_slurm_lint.py diff --git a/.github/workflows/README.md b/.github/workflows/README.md index f986924..7590d39 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -2,11 +2,35 @@ Description of current CI workflow. +## [fetch_includes.yml](fetch_includes.yml) + +Retrieves dynamically generated content from external sources. 
+ +Currently retrieves: + +- Software module list from [modules-list](https://github.com/nesi/modules-list). +- Glossary, spellcheck dictionary and snippets from [nesi-wordlist](https://github.com/nesi/nesi-wordlist) + +It then runs [link_apps_pages.py](#link_apps_pagespy). + +All modified files are added to a new branch called `new-assets` and merged into main. + +In theory, all this could be done at deployment, but I wanted to make sure that changes to these remote files didn't break anything. + +## [link_apps_pages.py](link_apps_pages.py) + +A Python script used to add a link to the appropriate documentation to [modules-list.json](../../docs/assets/module-list.json). + +The script checks all titles of input files, and sets the `support` key to be equal to the pages url. +It also adds whatever tags are on that page to the `domains` key. + +_One day I would like to simplify this whole thing._ + ## [checks.yml](checks.yml) A series of QA checks run on the documentation. -The checks can be started manually from the ![workflow page](https://github.com/nesi/agdr-docs/actions/workflows/checks.yml/badge.svg), +The checks can be started manually from the ![workflow page](https://github.com/nesi/support-docs/actions/workflows/checks.yml/badge.svg), select the target branch, give the pattern of files to include, and select which checks you want done. Checks will also be run on any _non main_ branch pushes. All checks will be run, but only on _changed_ files. @@ -16,7 +40,3 @@ More info on what these checks do in [README.md](../../checks/README.md) ## [deploy.yml](deploy.yml) Runs on push to _main_ branch. Builds and deploys pages. - -## [depo_deploy.yml](demo_deploy.yml) - -## [auto_merge.yml](auto_marge.yml) diff --git a/.github/workflows/auto_merge.yml b/.github/workflows/auto_merge.yml index 9d2097c..ceb5748 100644 --- a/.github/workflows/auto_merge.yml +++ b/.github/workflows/auto_merge.yml @@ -9,14 +9,14 @@ env: LABEL_NAME: auto_merge jobs: automerge: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - run: | for branch in $(gh pr list -R ${TARGET_REPO} --label "${LABEL_NAME}" --json headRefName --jq .[].headRefName);do - git checkout $branch + git checkout ${branch} gh pr merge --squash --auto --delete-branch done diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 0640c7b..c697e0e 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -4,6 +4,7 @@ env: on: push: branches-ignore: [main] + repository_dispatch: workflow_dispatch: inputs: fileList: @@ -27,6 +28,10 @@ on: description: Check Meta default: true type: boolean + checkSlurm: + description: Check Slurm + default: true + type: boolean testBuild: description: Test Build default: true @@ -34,7 +39,7 @@ on: jobs: get: name: Determine what files to check - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout Repo uses: actions/checkout@v4 @@ -58,7 +63,7 @@ jobs: spellcheck: name: Check Spelling if: ${{ github.event_name != 'workflow_dispatch' || inputs.checkSpelling }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: get steps: - if: ${{! needs.get.outputs.filelist}} @@ -80,7 +85,7 @@ jobs: proselint: name: Check Prose if: ${{github.event_name != 'workflow_dispatch'|| inputs.checkProse}} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: get steps: - if: ${{! 
needs.get.outputs.filelist}} @@ -100,7 +105,7 @@ jobs: mdlint: name: Check Markdown if: ${{github.event_name != 'workflow_dispatch' || inputs.checkMarkdown}} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: get steps: - if: ${{! needs.get.outputs.filelist}} @@ -120,7 +125,7 @@ jobs: metacheck: name: Check page meta if: ${{github.event_name != 'workflow_dispatch'|| inputs.checkMeta}} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: get steps: - if: ${{ ! needs.get.outputs.filelist}} @@ -134,10 +139,27 @@ jobs: run: | shopt -s globstar extglob python3 checks/run_meta_check.py ${{needs.get.outputs.filelist}} + slurmcheck: + name: Check slurm scripts + if: ${{github.event_name != 'workflow_dispatch'|| inputs.checkSlurm}} + runs-on: ubuntu-22.04 + needs: get + steps: + - if: ${{ ! needs.get.outputs.filelist}} + name: No files to check meta on. + run: exit 0 + - if: ${{needs.get.outputs.filelist}} + name: Check out repo. + uses: actions/checkout@v4 + - if: ${{needs.get.outputs.filelist}} + name: Check markdown meta. + run: | + shopt -s globstar extglob + python3 checks/run_slurm_lint.py ${{needs.get.outputs.filelist}} testBuild: name: Test build if: ${{github.event_name != 'workflow_dispatch' || inputs.testBuild}} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: get steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/demo_deploy.yml b/.github/workflows/demo_deploy.yml index 1a6aca0..fce5041 100644 --- a/.github/workflows/demo_deploy.yml +++ b/.github/workflows/demo_deploy.yml @@ -14,7 +14,7 @@ jobs: demo-deploy: continue-on-error: true name: Trigger test deployments - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Trigger Workflow in Another Repository run: | @@ -37,14 +37,6 @@ jobs: # github_token: ${{ secrets.PAT }} # workflow_file_name: deploy.yml # client_payload: '{"targets":"${GITHUB_REPOSITORY}:${HEAD}", "use-cache":"true"}' - - name: Wait for Workflow Action - run: | - curl -L \ - -X POST \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${{ secrets.PAT }}" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/${TARGET_OWNER}/${TARGET_REPO}/actions/runs/deploy.yml - name: Wait for Workflow Action run: | # Just give a minute or so to deploy diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 4dc0803..1e22d9b 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,8 +1,8 @@ name: Deploy to gh-pages on: - push: + push: branches: [main] - workflow_dispatch: + workflow_dispatch: env: PYTHON_VERSION: 3.x GH_TOKEN: ${{ github.token }} @@ -14,7 +14,7 @@ permissions: jobs: documentation: name: Build documentation - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout repository uses: actions/checkout@v4 @@ -23,6 +23,7 @@ jobs: sparse-checkout: | docs overrides + .github - name: Set up Python runtime uses: actions/setup-python@v5 with: diff --git a/checks/README.md b/checks/README.md index 40408b5..29daf11 100644 --- a/checks/README.md +++ b/checks/README.md @@ -16,12 +16,6 @@ Spellcheck pipeline settings can be modified in [.spellcheck.yml](../.spellcheck List of custom words can be found in [dictionary.txt](../docs/assets/glossary/dictionary.txt), however you **should not edit this manually**, see [adding-words-to-dictionary](../docs/CONTRIBUTING.md#adding-words-to-dictionary). -This list is automatically updated daily in the case of docs.nesi, but not the other sites. 
-You should occasionally run -`wget -O docs/assets/glossary/dictionary.txt https://raw.githubusercontent.com/nesi/nesi-wordlist/main/outputs/dictionary.txt` -to keep it up to date. - - ### Limitations Spellchecker does not provide output lineumber / column. diff --git a/checks/fail_checks.md b/checks/fail_checks.md new file mode 100644 index 0000000..4eb0599 --- /dev/null +++ b/checks/fail_checks.md @@ -0,0 +1,70 @@ +--- +created_at: 2025-01-28 +template: not_a_template +--- + + + +This page is meant for testing the linting checks. For all checks to run properly this file should be moved under `docs`. + +It should trigger the fail threshold for `meta-checks`, `proselint-checks`, `spelling-checks`, `slurm-lint`, and `test-build` + +Add tests here as needed. + +Somee tpyos for spelchecker to triger fail neeeds to be at laest fiveteen keybored whopsers whiche is qiute a hihe tolerence i thinc. I woold loweir this threshehold bute for the momenent it can be a bitte overzaelos. + +``` as +typos shuold be igonred in code blokcs +``` + +!!! warning "some admonistion" + contents of admonition + +Typos should `be igonred` in inline code blocks. + +Typos should [be ignored](https://www.docs.nesi.org.nz) +Typos should [be ignored](../docs/General/FAQs/How_do_I_request_memory.md) + + + +Bad formatting for markdownlint + + * bullet points +- bullet points + +### Improper + +## Header nesting + + +```sl +!#/bin/bash + + +#SBATCH -j short-option +#SBATCH --not-a-real-flag=not_real + +module load something + +``` + +[bad link](../docs/General/not-a-page.md) + +[bad internal link](#impropers) + +Very very bad prose shit this is probably very unproper language sorry about that hopefully the chairman doesn't see lets circle back around. + +The following checks will only work if moved into a subdir of `docs/` + +https://www.nesi.org.nz/deadlink + + +{{broken_macro()}} + +{% include "not-real.md" -%} + + +{{broken_macro()}} + + +{% set app_name = page.no_a_variable | trim %} diff --git a/checks/run_meta_check.py b/checks/run_meta_check.py index e23151b..f6bfe5b 100755 --- a/checks/run_meta_check.py +++ b/checks/run_meta_check.py @@ -16,9 +16,11 @@ # Ignore files if they match this regex EXCLUDED_FROM_CHECKS = [r"docs/assets/.*", r".*/index\.html", r".*/index\.md", r".*\.pages\.yml"] +msg_count = {"debug": 0, "notice": 0, "warning": 0, "error": 0} + # Constants for use in checks. -MAX_TITLE_LENGTH = 24 # As font isn't monospace, this is only approx +MAX_TITLE_LENGTH = 28 # As font isn't monospace, this is only approx MAX_HEADER_LENGTH = 32 # minus 2 per extra header level MIN_TAGS = 2 RANGE_SIBLING = [4, 8] @@ -27,7 +29,7 @@ # Warning level for missing parameters. EXPECTED_PARAMETERS = { "title": "", - "template": ["main.html"], + "template": ["main.html", "supported_apps.html", "updates.html"], "description": "", "icon": "", "status": ["new", "deprecated"], @@ -36,10 +38,8 @@ "suggested": "", # Add info here when implimented. "created_at": "", "tags": "", # Add info here when implimented. 
- "vote_count": "", - "vote_sum": "", - "zendesk_article_id": "", - "zendesk_section_id": "", + "search": "", + "hide": ["toc", "nav", "tags"], } @@ -61,15 +61,13 @@ def main(): continue _nav_check() with open(input_path, "r") as f: - print(f"Checking meta for {f.name}") + _emit("", {"level": "debug", "file": input_path, "message": f"Checking meta for {f.name}"}) try: contents = f.read() match = re.match(r"---\n([\s\S]*?)---", contents, re.MULTILINE) if not match: - print( - f"::warning file={input_path},title=meta.parse,col=0,endColumn=99,line=1\ - ::Meta block missing or malformed." - ) + _emit("meta.parse", {"file": input_path, "col": 0, "endColumn": 99, "line": 1, + "message": "Meta block missing or malformed."}) meta = {} else: meta = yaml.safe_load(match.group(1)) @@ -94,14 +92,19 @@ def main(): for check in ENDCHECKS: _run_check(check) except Exception as e: - print(f"::error file={input_path},title=misc,col=0,endColumn=0,line=1 ::{e}") + _emit("misc", {"level": "error", "file": input_path, "message": e}) + def _run_check(f): for r in f(): - print(f"::{r.get('level', 'warning')} file={input_path},title={f.__name__},col={r.get('col', 0)},endColumn={r.get('endColumn', 99)},line={r.get('line', 1)}::{r.get('message', 'something wrong')}") - sys.stdout.flush() - time.sleep(0.01) + _emit(f.__name__, r) + +def _emit(f, r): + msg_count[r.get('level', 'warning')] += 1 + print(f"::{r.get('level', 'warning')} file={input_path},title={f},col={r.get('col', 0)},endColumn={r.get('endColumn', 99)},line={r.get('line', 1)}::{r.get('message', 'something wrong')}") + sys.stdout.flush() + time.sleep(0.01) def _title_from_filename(): @@ -146,7 +149,7 @@ def _unpack(toc, a): if not header_match: return - + header_level = len(header_match.group(1)) header_name = header_match.group(2) @@ -160,29 +163,29 @@ def _unpack(toc, a): _unpack(toc, toc_parents)["children"][header_name] = {"level": header_level, "lineno": lineno, "children": {}} toc_parents += [header_name] except Exception: - print(f"::error file={input_path},title=misc-nav,col=0,endColumn=0,line=1 ::Failed to parse Nav tree. Something is wrong.") + _emit("misc.nav", {"level": "error", "file": input_path, + "message": "Failed to parse Nav tree. Something is very wrong."}) def _nav_check(): - doc_root = Path(DOC_ROOT).resolve() - rel_path = input_path.resolve().relative_to(doc_root) - for i in range(1, len(rel_path.parts)): - num_siblings = 0 - for file_name in os.listdir(doc_root.joinpath(Path(*rel_path.parts[:i]))): - if not any(re.match(pattern, file_name) for pattern in EXCLUDED_FROM_CHECKS): - num_siblings += 1 - if num_siblings < RANGE_SIBLING[0]: - print( - f"::warning file={input_path},title=meta.siblings,col=0,endColumn=99,line=1::Parent category \ -'{rel_path.parts[i-1]}' has too few children ({num_siblings}). Try to nest '{RANGE_SIBLING[0]}' or more \ -items here to justify it's existence." - ) - elif num_siblings > RANGE_SIBLING[1]: - print( - f"::warning file={input_path},title=meta.siblings,col=0,endColumn=99,line=1::Parent category \ -'{rel_path.parts[i-1]}' has too many children ({num_siblings}). Try to keep number of items in a category \ -under '{RANGE_SIBLING[1]}', maybe add some new categories?" 
- ) + try: + doc_root = Path(DOC_ROOT).resolve() + rel_path = input_path.resolve().relative_to(doc_root) + for i in range(1, len(rel_path.parts)): + num_siblings = 0 + for file_name in os.listdir(doc_root.joinpath(Path(*rel_path.parts[:i]))): + if not any(re.match(pattern, file_name) for pattern in EXCLUDED_FROM_CHECKS): + num_siblings += 1 + if num_siblings < RANGE_SIBLING[0]: + _emit("meta.siblings", {"file": input_path, "message": f"Parent category \ + '{rel_path.parts[i-1]}' has too few children ({num_siblings}). Try to nest '{RANGE_SIBLING[0]}' or more \ + items here to justify it's existence."}) + elif num_siblings > RANGE_SIBLING[1]: + _emit("meta.siblings", {"file": input_path, "message": f"Parent category \ + '{rel_path.parts[i-1]}' has too many children ({num_siblings}). Try to keep number of items in a category \ + under '{RANGE_SIBLING[1]}', maybe add some new categories?"}) + except ValueError as e: + _emit("meta.nav", {"file": input_path, "level": "error", "message": f"{e}. Nav checks will be skipped"}) def title_redundant(): @@ -273,3 +276,7 @@ def _count_children(d): # FIXME terrible hack to make VSCode in codespace capture the error messages # see https://github.com/microsoft/vscode/issues/92868 as a tentative explanation + time.sleep(5) + + # Arbitrary weighting whether to fail check or not + exit((100 * (len(sys.argv)-1)) < msg_count["notice"] + (30 * msg_count["warning"] + (100 * msg_count["error"]))) diff --git a/checks/run_proselint.py b/checks/run_proselint.py index 36bcecf..56f8730 100755 --- a/checks/run_proselint.py +++ b/checks/run_proselint.py @@ -11,10 +11,14 @@ import proselint from proselint import config, tools +ALLOWABLE_NOTICES = 5 + if __name__ == "__main__": files = sys.argv[1:] + count_notices = 0 + # Load defaults from config. 
config_custom = tools.load_options( config_file_path=".proselint.json", conf_default=config.default @@ -23,7 +27,6 @@ for file in files: print(f"Running proselint on {file}") content = Path(file).read_text(encoding="utf8") - fails = proselint.tools.lint(content, config=config_custom) for notice in proselint.tools.lint(content, config=config_custom): print( f"::{notice[7]} file={file},line={notice[2]+1}," @@ -31,4 +34,7 @@ f"title={notice[0]}::'{notice[1]}'", flush=True ) + count_notices += 1 time.sleep(0.01) + + exit(count_notices >= ALLOWABLE_NOTICES*(len(sys.argv)-1)) diff --git a/checks/run_pyspelling.py b/checks/run_pyspelling.py index 3604423..ddb25ed 100755 --- a/checks/run_pyspelling.py +++ b/checks/run_pyspelling.py @@ -9,8 +9,12 @@ from pyspelling import spellcheck from flashtext import KeywordProcessor +ALLOWABLE_TYPOS = 20 + if __name__ == "__main__": + count_typos = 0 + for source in sys.argv[1:]: print(f"Running Pyspelling on {source}") @@ -39,7 +43,9 @@ f"title=spelling::Word '{word}' is misspelled.", flush=True, ) + count_typos += 1 # FIXME terrible hack to make VSCode in codespace capture the error messages # see https://github.com/microsoft/vscode/issues/92868 as a tentative explanation time.sleep(5) + exit(count_typos >= ALLOWABLE_TYPOS*(len(sys.argv)-1)) diff --git a/checks/run_slurm_lint.py b/checks/run_slurm_lint.py new file mode 100755 index 0000000..6048db3 --- /dev/null +++ b/checks/run_slurm_lint.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python3 + +""" +Runs checks on slurm scrips found in code and outputs in github action readable format +""" + +__author__ = "cal w" + +import re +import sys +import time +from pathlib import Path + +msg_count = {"debug": 0, "notice": 0, "warning": 0, "error": 0} + +LINES_AFTER_SHEBANG = 1 +LINES_AFTER_HEADER = 1 +SHEBANG = r"^#!\/bin\/bash -e\s*$" +SBATCH_HEADER = r"^(.*)#SBATCH\s*(-[^=\s]*)([\s=]*)(\S*.*?)(#.*)?$" +SBATCH_DELIM = r"=|\s+" + +REQUIRED_SBATCH_HEADER = [ + {"long": "--job-name", "short": "-j"}, + {"long": "--account", "short": "-A"}, + {"long": "--time", "short": "-t"}, +] + +# Not used yet +ALLOWED_SBATCH_HEADER = [ + {"long": "--cpus-per-task", "short": "-c"}, + {"long": "--array", "short": "-a"}, + {"long": "--dependency", "short": "-d"}, + {"long": "--gpus-per-node", "short": ""}, + {"long": "--hint", "short": ""}, + {"long": "--mem=", "short": ""} +] + + +def main(): + # Per file variables + global input_path, title_from_h1, title_from_filename, title, meta, contents, input_path + + # Walk variables + global lineno, line, in_code_block, last_header_level, last_header_lineno, sibling_headers + + global toc, toc_parents, header + + inputs = sys.argv[1:] + + for input_string in inputs: + + input_path = Path(input_string) + with open(input_path, "r") as f: + contents = f.read() + for lineno, indent, slurm in finditer2(r"\n( *)```\s*sl.*\n([\s\S\n]*?)\n\s*```", contents, re.MULTILINE): + #try: + parse_script(lineno+3, indent, slurm) + #except Exception as e: + # print(f"::ERROR file={input_path},title=failed_to_parse, col=0, endColumn=99, line={lineno}::Failed to parse slurm script {e}") + + +def parse_script(start_linno, indent, slurm): + global step, n_lines_after_shebang, n_lines_after_header, line, uses_equals_delim, uses_whitespace_delim + global match_header_line, start_of_header, slurm_headers + + uses_equals_delim = False + uses_whitespace_delim = False + + def _run_check(f): + for r in f(): + msg_count[r.get('level', 'warning')] += 1 + print(f"::{r.get('level', 'warning')} file={input_path},title={f.__name__}," + 
+ f"col={r.get('col', 0) + indent},endColumn={r.get('endColumn', 99) + indent}," + + f"line={start_linno + r.get('line', lineno)}::{r.get('message', 'something wrong')}") + sys.stdout.flush() + time.sleep(0.01) + + n_lines_after_shebang = 0 + n_lines_after_header = 0 + slurm_headers = [] + + start_of_header = 0 + end_of_header = 0 + step = 0 # 0 : shebang + # 1 : slurm header + # 2 : bash + for lineno, line in enumerate(slurm.split("\n")): + line = line.removeprefix(' '*indent) + if lineno == 0: + _run_check(slurm_shebang) + continue + if step == 0: + if re.match(r"^\s*$", line): + n_lines_after_shebang += 1 + elif re.match(SBATCH_HEADER, line): + _run_check(lines_after_shebang) + step = 1 + start_of_header = lineno + else: + _run_check(content_before_slurm_header) + if step == 1: + if re.match(r"^\s*$", line): + n_lines_after_header += 1 + continue + match_header_line = re.match(SBATCH_HEADER, line) + slurm_headers.append(match_header_line) + if match_header_line: + for check in SBATCH_HEADER_WALK: + _run_check(check) + else: + step = 2 + end_of_header = line + for check in SBATCH_HEADER_ALL: + _run_check(check) + + # elif : + + +def finditer2(pattern, string, flags): + """ + A version of ``re.finditer`` that returns ``(match, line_number)`` pairs. + """ + matches = list(re.finditer(pattern, string, flags)) + if matches: + end = matches[-1].start() + # -1 so a failed `rfind` maps to the first line. + newline_table = {-1: 0} + for i, m in enumerate(re.finditer("\\n", string), 1): + # Don't find ncewlines past our last match. + offset = m.start() + if offset > end: + break + newline_table[offset] = i + + # Failing to find the newline is OK, -1 maps to 0. + for m in matches: + newline_offset = string.rfind("\n", 0, m.start()) + line_number = newline_table[newline_offset] + yield (line_number, len(m.group(1)), m.group(2)) + + +def slurm_shebang(): + if not re.match(SHEBANG, line): + yield {"message": f"Your shebang was '{line}', should use '{SHEBANG}'"} + + +def lines_after_shebang(): + if n_lines_after_shebang != LINES_AFTER_SHEBANG: + yield {"level": "notice", "message": f"There are {n_lines_after_shebang} blank lines after the shebang, should be {LINES_AFTER_SHEBANG}."} + + +def content_before_slurm_header(): + yield {"level": "error", "message": f"There is text ('{line}') between the shebang and slurm header. This is not a valid SLURM script."} + + +def malformed_delimiter(): + global uses_equals_delim, uses_whitespace_delim + delim = match_header_line.group(3) + if delim == "=": + uses_equals_delim = True + yield {"level": "notice", "message": "Whitespace is preffered SLURM header delimiter."} + elif delim.isspace(): + uses_whitespace_delim = True + else: + yield {"level": "error", "message": f"'{delim}' is not a valid SLURM header delimiter."} + + +def inconsistant_delimiter(): + if uses_equals_delim and uses_whitespace_delim: + yield {"message": "Header uses both whitespace and '=' delimiters.", "line": start_of_header} + + +def short_option(): + if not match_header_line.group(2)[:2] == "--": + yield {"level": "notice", "col": 8, "endColumn": 8 + len(match_header_line.group(2)), + "message": f"Using short form flag '{match_header_line.group(2)}'. 
+
+
+def minimum_options():
+    for header in REQUIRED_SBATCH_HEADER:
+        for h in slurm_headers:
+            if not h:
+                continue
+            a = h.group(2)
+            if a == header["long"] or a == header["short"]:
+                break
+        else:
+            yield {"message": f"Script header must contain \'{header['long']}\'.",
+                   "line": start_of_header}
+
+
+SBATCH_HEADER_WALK = [malformed_delimiter, short_option]
+SBATCH_HEADER_ALL = [inconsistant_delimiter, minimum_options]
+
+if __name__ == "__main__":
+    main()
+
+    # FIXME terrible hack to make VSCode in codespace capture the error messages
+    # see https://github.com/microsoft/vscode/issues/92868 as a tentative explanation
+    time.sleep(5)
+
+    # Arbitrary weighting whether to fail check or not
+    exit(100*(len(sys.argv)-1) < msg_count["notice"] + (30 * msg_count["warning"] + (100 * msg_count["error"])))
diff --git a/checks/run_test_build.py b/checks/run_test_build.py
index 0d962f0..15f0bba 100755
--- a/checks/run_test_build.py
+++ b/checks/run_test_build.py
@@ -13,6 +13,8 @@
 This works but is a bit messy
 """
 
+msg_count = {"DEBUG": 0, "INFO": 0, "WARNING": 0, "ERROR": 0}
+
 
 def parse_macro(record):
 
@@ -28,7 +30,7 @@ def parse_macro(record):
         return False
 
     g = m.groupdict()
-    record.levelname = g["level"]
+    record.levelname = g["level"].strip().upper().split("\x1b")[0]
     record.name = g["title"]
     record.filename = g["file"]
     record.msg = g["message"]
@@ -42,14 +44,27 @@
     return True
 
 
+def count_msg(record):
+    msg_count[record.levelname] += 1
+
+    return True
+
+
 if __name__ == '__main__':
     log = logging.getLogger('root')
     log.setLevel(logging.INFO)
     sh = logging.StreamHandler(sys.stdout)
     sh.addFilter(parse_macro)
+    sh.addFilter(count_msg)
     sh.setFormatter(logging.Formatter(
         '::%(levelname)s file=%(filename)s,title=%(name)s,col=0,endColumn=0,line=%(lineno)s::%(message)s'))
     log.addHandler(sh)
 
     config = load_config(config_file_path="./mkdocs.yml")
-    build.build(config)
+    config.plugins.on_startup(command='build', dirty=True)
+    try:
+        build.build(config, dirty=True)
+    finally:
+        config.plugins.on_shutdown()
+    time.sleep(5)
+    exit(100 < msg_count["INFO"] + (30 * msg_count["WARNING"] + (100 * msg_count["ERROR"])))

From e019af42a7e82a00e4bb73141642f88b3f994592 Mon Sep 17 00:00:00 2001
From: cal
Date: Mon, 17 Feb 2025 11:58:22 +1300
Subject: [PATCH 31/31] update vscode stuff

---
 .vscode/includes.code-snippets | 10 ++++--
 .vscode/launch.json            | 43 ++++++++++++++++++++++++++
 .vscode/settings.json          | 19 +++++++++---
 .vscode/tasks.json             | 56 ++++++++++++++++++++++++++++++++--
 4 files changed, 118 insertions(+), 10 deletions(-)
 create mode 100644 .vscode/launch.json

diff --git a/.vscode/includes.code-snippets b/.vscode/includes.code-snippets
index 1399dd7..1070248 100644
--- a/.vscode/includes.code-snippets
+++ b/.vscode/includes.code-snippets
@@ -1,5 +1,5 @@
 {
-    // Place your agdr-docs workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
+    // Place your support-docs workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
     // description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
     // is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
     // used to trigger the snippet and the body will be expanded and inserted. 
Possible variables are: @@ -85,7 +85,9 @@ "created_at: $CURRENT_YEAR-$CURRENT_MONTH-$CURRENT_DATE", // "title: ${1:${TM_FILENAME/_/ /g}}}", "description: ${1}", - "tags: [${2}]", + "tags: ", + " - ${2}", + " - ${3}", "---", "", "${0}" @@ -98,7 +100,9 @@ "---", "created_at: $CURRENT_YEAR-$CURRENT_MONTH-$CURRENT_DATE", "description: ${1:Will be used to generate page preview. Should not contain keywords not in the body of article.}", - "tags: [${2:tag1},${3:tag2}]", + "tags:", + " - ${2:tag1}", + " - ${3:tag2}", "---", "", "${4:Short preface to article.}", diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..a8fdbff --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,43 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug Proselint", + "type": "debugpy", + "request": "launch", + "program": "checks/run_proselint.py", + "args": ["docs/fail_checks.md"], + "console": "integratedTerminal", + "justMyCode": false + }, + { + "name": "Debug Meta Check", + "type": "debugpy", + "request": "launch", + "program": "checks/run_meta_check.py", + "args": ["docs/fail_checks.md"], + "console": "integratedTerminal", + "justMyCode": true + }, + { + "name": "Debug Testbuild", + "type": "debugpy", + "request": "launch", + "program": "checks/run_test_build.py", + "console": "integratedTerminal", + "justMyCode": false + }, + { + "name": "Debug Slurm Lint", + "type": "debugpy", + "request": "launch", + "program": "checks/run_slurm_lint.py", + "args": ["docs/Scientific_Computing/Supported_Applications/ABAQUS.md"], + "console": "integratedTerminal", + "justMyCode": true + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index d7cfff3..da7e92e 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,16 +3,24 @@ "triggerTaskOnSave.on": true, "triggerTaskOnSave.tasks": { "meta-checks": [ - "docs/**/*.md" + "docs/**/*.md", + "checks/fail_checks.md" ], "proselint-checks": [ - "docs/**/*.md" + "docs/**/*.md", + "checks/fail_checks.md" ], "spelling-checks": [ - "docs/**/*.md" + "docs/**/*.md", + "checks/fail_checks.md" + ], + "slurm-lint": [ + "docs/**/*.md", + "checks/fail_checks.md" ], "test-build": [ - "docs/**/*.md" + "docs/**/*.md", + "checks/fail_checks.md" ] }, "files.autoSave": "afterDelay", @@ -40,6 +48,9 @@ "redhat.telemetry.enabled": false, "githubPullRequests.pushBranch": "always", "git.useEditorAsCommitInput": false, + "yaml.schemas": { + "https://json.schemastore.org/github-workflow.json": "file:///workspaces/support-docs/.github/workflows/deploy.yml" + }, "markdown.copyFiles.destination": { "docs/**/*": "/docs/assets/images/" }, diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ee48d21..784d2f0 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -12,7 +12,6 @@ "showReuseMessage": false }, "command": "./checks/run_meta_check.py", - "problemMatcher": { "severity": "info", "pattern": { @@ -48,7 +47,6 @@ "column": 4, "endLine": 5, "message": 7, - "loop": true }, "owner": "proselint", "fileLocation": "autoDetect", @@ -95,7 +93,7 @@ "column": 4, "endColumn": 5, "line": 6, - "message": 7, + "message": 7 }, "owner": "test-build", "fileLocation": [ @@ -104,6 +102,58 @@ ], }, }, + { + "label": "slurm-lint", + "args": [ + "${file}" + ], + "presentation": { + "reveal":"never", + "showReuseMessage": false + }, + "command": 
"./checks/run_slurm_lint.py", + "problemMatcher": { + "severity": "info", + "pattern": { + "regexp": "^::(notice|warning|error) file=(.*?),title=(.*?),col=([0-9]*),endColumn=([0-9]*),line=([0-9]*)::(.*)$", + "severity": 1, + "file": 2, + "code": 3, + "column": 4, + "endColumn": 5, + "line": 6, + "message": 7, + }, + "owner": "slurm-lint", + "fileLocation": "autoDetect", + }, + }, + { + "label": "fetch", + "group": "none", + "type": "shell", + "isBackground": false, + "presentation": { + "reveal":"never", + "showReuseMessage": false + }, + "runOptions": { + "runOn": "folderOpen" + }, + "command": "bash .github/fetch_includes.sh", + "problemMatcher": { + "severity": "info", + "pattern": { + "regexp": "^::(notice|warning|error) file=(.*?),title=(.*?)::(.*)$", + "severity": 1, + "file": 2, + "code": 3, + "message": 4 + }, + "owner": "fetch", + "fileLocation": "autoDetect", + }, + }, { "label": "serve", "group": "build",