scripts: validate-autoinstall-user-data refresh
./scripts/validate-autoinstall-user-data is used by the integration
tests to verify that the written user data validates against the
combined JSON schema, but we have since introduced run-time checks for
more than simple JSON validation can catch (e.g. warnings/errors on
unknown keys, or strict top-level key checking to support a top-level
"autoinstall" keyword in the non-cloud-config delivery scenario). This
changes the validation script to rely on the server's logic directly to
pre-validate the supplied autoinstall configuration.

Additionally, this adds an argument parser to make the script more
user-friendly, so we can now advertise it as a tool for users to
pre-validate their autoinstall configurations.
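
For example, a user could pre-validate a configuration with invocations
along these lines (file names are illustrative; the options are the
ones added in this commit):

    # user data wrapped in a cloud-config, the default expectation
    python3 scripts/validate-autoinstall-user-data.py my-user-data.yaml

    # a bare autoinstall config, without the cloud-config wrapper
    python3 scripts/validate-autoinstall-user-data.py --no-expect-cloudconfig my-autoinstall.yaml

    # read from stdin, with more verbose output
    python3 scripts/validate-autoinstall-user-data.py -vv < my-user-data.yaml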
Chris-Peterson444 committed May 3, 2024
1 parent 396e4d5 commit 99e6eb3
Showing 2 changed files with 191 additions and 44 deletions.
2 changes: 1 addition & 1 deletion scripts/runtests.sh
@@ -53,7 +53,7 @@ validate () {
         answers-core)
             ;;
         *)
-            python3 scripts/validate-autoinstall-user-data.py < $tmpdir/var/log/installer/autoinstall-user-data
+            python3 scripts/validate-autoinstall-user-data.py --check-link < $tmpdir/var/log/installer/autoinstall-user-data
             # After the lunar release and the introduction of mirror testing, it
             # came to our attention that new Ubuntu installations have the security
             # repository configured with the primary mirror URL (i.e.,
233 changes: 190 additions & 43 deletions scripts/validate-autoinstall-user-data.py
@@ -27,63 +27,210 @@
"""

 import argparse
+import asyncio
 import io
-import json
 import sys
+import tempfile
+import traceback
+from argparse import Namespace
+from pathlib import Path
+from textwrap import dedent
+from typing import Any

-import jsonschema
 import yaml


-def main() -> None:
-    """ Entry point. """
-    parser = argparse.ArgumentParser()
-
-    parser.add_argument("--json-schema",
-                        help="Path to the JSON schema",
-                        type=argparse.FileType("r"),
-                        default="autoinstall-schema.json")
-    parser.add_argument("input", nargs="?",
-                        help="Path to the user data instead of stdin",
-                        type=argparse.FileType("r"),
-                        default="-")
-    parser.add_argument("--no-expect-cloudconfig", dest="expect-cloudconfig",
-                        action="store_false",
-                        help="Assume the data is not wrapped in cloud-config.",
-                        default=True)
-
-    args = vars(parser.parse_args())
-
-    user_data: io.TextIOWrapper = args["input"]
-
-    if args["expect-cloudconfig"]:
-        assert user_data.readline() == "#cloud-config\n"
-        def get_autoinstall_data(data): return data["autoinstall"]
+# Python path trickery so we can import subiquity code and still call this
+# script without using the makefile
+scripts_dir = sys.path[0]
+subiquity_root = Path(scripts_dir) / ".."
+curtin_root = subiquity_root / "curtin"
+probert_root = subiquity_root / "probert"
+# At the very least, local curtin needs to be in the front of the python path
+sys.path.insert(0, str(subiquity_root))
+sys.path.insert(1, str(curtin_root))
+sys.path.insert(2, str(probert_root))
+
+from subiquity.cmd.server import make_server_args_parser  # noqa: E402
+from subiquity.server.dryrun import DRConfig  # noqa: E402
+from subiquity.server.server import SubiquityServer  # noqa: E402


+def parse_args() -> Namespace:
+    """Parse arguments with argparse"""
+
+    description: str = dedent(
+        """\
+        Validate autoinstall user data against the autoinstall schema. By default
+        expects the user data is wrapped in a cloud-config. Example:
+            #cloud-config
+            autoinstall:
+                <user data here>
+        To validate the user data directly, you can pass --no-expect-cloudconfig
+        """
+    )
+
+    parser = argparse.ArgumentParser(
+        prog="validate-autoinstall-user-data",
+        description=description,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+
+    parser.add_argument(
+        "input",
+        help="Path to the autoinstall configuration instead of stdin",
+        nargs="?",
+        type=argparse.FileType("r"),
+        default="-",
+    )
+    parser.add_argument(
+        "--no-expect-cloudconfig",
+        dest="expect_cloudconfig",
+        action="store_false",
+        help="Assume the data is not wrapped in cloud-config.",
+        default=True,
+    )
+    parser.add_argument(
+        "-v",
+        "--verbosity",
+        action="count",
+        help=(
+            "Increase output verbosity. Use -v for more info, -vv for "
+            "detailed output, and -vvv for fully detailed output."
+        ),
+        default=0,
+    )
+    # An option we use in CI to make sure Subiquity will insert a link to
+    # the documentation in the auto-generated autoinstall file post-install
+    parser.add_argument(
+        "--check-link",
+        dest="check_link",
+        action="store_true",
+        help=argparse.SUPPRESS,
+        default=False,
+    )
+
+    args: Namespace = parser.parse_args()
+
+    return args


+def make_app():
+    parser = make_server_args_parser()
+    opts, unknown = parser.parse_known_args(["--dry-run"])
+    app = SubiquityServer(opts, "")
+    # This is needed because the ubuntu-pro server controller accesses dr_cfg
+    # in the initializer.
+    app.dr_cfg = DRConfig()
+    app.base_model = app.make_model()
+    app.controllers.load_all()
+    return app


+def parse_cloud_config(data: str) -> dict[str, Any]:
+    """Parse cloud-config and extract autoinstall"""
+
+    first_line: str = data.splitlines()[0]
+    if not first_line == "#cloud-config":
+        raise AssertionError(
+            (
+                "Expected data to be wrapped in cloud-config "
+                "but first line is not '#cloud-config'. Try "
+                "passing --no-expect-cloudconfig."
+            )
+        )
+
+    cc_data: dict[str, Any] = yaml.safe_load(data)
+
+    if "autoinstall" not in cc_data:
+        raise AssertionError(
+            (
+                "Expected data to be wrapped in cloud-config "
+                "but could not find top level 'autoinstall' "
+                "key."
+            )
+        )
     else:
-        def get_autoinstall_data(data):
-            try:
-                cfg = data["autoinstall"]
-            except KeyError:
-                cfg = data
-            return cfg
+        return cc_data["autoinstall"]


+# Validate the autoinstall config using the dry-run server
+async def verify_autoinstall(cfg_path: str, verbosity: int = 0) -> int:
+    """Verify autoinstall configuration.
+    Returns 0 if successfully validated.
+    Returns 1 if it fails to validate.
+    """
+
+    # Make a dry-run server
+    app = make_app()
+
+    # Suppress start and finish events unless verbosity >= 2
+    if verbosity < 2:
+        for el in app.event_listeners:
+            el.report_start_event = lambda x, y: None
+            el.report_finish_event = lambda x, y, z: None
+    # Suppress info events unless verbosity >= 1
+    if verbosity < 1:
+        for el in app.event_listeners:
+            el.report_info_event = lambda x, y: None
+
+    # Tell the server where to load the autoinstall
+    app.autoinstall = cfg_path
+    # Make sure events are printed (we could fail during read, which
+    # would happen before we set up the reporting controller)
+    app.controllers.Reporting.config = {"builtin": {"type": "print"}}
+    app.controllers.Reporting.start()
+    # Do both validation phases
+    try:
+        app.load_autoinstall_config(only_early=True, context=None)
+        app.load_autoinstall_config(only_early=False, context=None)
+    except Exception as exc:
+
+        print(exc)  # Has the useful error message
+
+        # Print the full traceback if verbosity >= 3
+        if verbosity > 2:
+            traceback.print_exception(exc)

-    stream_pos: int = user_data.tell()
+        print("Failure: The provided autoinstall config did not validate successfully")
+        return 1

-    data: str = user_data.read()
+    print("Success: The provided autoinstall config validated successfully")
+    return 0

-    link: str = "https://canonical-subiquity.readthedocs-hosted.com/en/latest/reference/autoinstall-reference.html"  # noqa: E501

-    assert link in data
+def main() -> None:
+    """Entry point."""
+
+    args: Namespace = parse_args()
+
+    user_data: io.TextIOWrapper = args.input
+    str_data: str = user_data.read()
+
+    # Verify autoinstall doc link is in the file
+    if args.check_link:
+        link: str = (
+            "https://canonical-subiquity.readthedocs-hosted.com/en/latest/reference/autoinstall-reference.html"  # noqa: E501
+        )
+
+        if link not in str_data:
+            raise AssertionError("Documentation link missing from user data")
+
+    # Parse out the autoinstall if expected within cloud-config
+    if args.expect_cloudconfig:
+        ai_data: str = yaml.dump(parse_cloud_config(str_data))
+    else:
+        ai_data = str_data

     # Verify autoinstall schema
-    user_data.seek(stream_pos)
+    with tempfile.TemporaryDirectory() as td:
+        path = Path(td) / "autoinstall.yaml"
+        with open(path, "w") as tf:
+            tf.write(ai_data)

-    data = yaml.safe_load(user_data)
+        ret_code = asyncio.run(verify_autoinstall(path, verbosity=args.verbosity))

-    jsonschema.validate(get_autoinstall_data(data),
-                        json.load(args["json_schema"]))
+    sys.exit(ret_code)


if __name__ == "__main__":
