#######################
# Goal of this file:
#
# * No bin/ scripts scattered around the repo
# * All development, CI, and production scripts in one place.
# * No complicated scripts in CI. Include scripts here, run them on GH actions.
# * No hidden magical scripts on developers' machines without a place to go
# * All ENV variables should be handled via direnv and not configured here
# * Any python code which is not completely independent of the project should be in app/ so
# refactoring tools can rename all symbols automatically.
# * Make CI more portable. By including as much logic as possible within the Justfile you can
# easily move to a different CI system if you need to.
# * Be greedy about new scripts that help optimize the dev loop. Just autocomplete + fzf makes it easy to search
#   and sort through really long lists of recipes.
# * Scripts marked as `[macos]` should only run on dev machines. By default, this setup does not support non-macOS
#   dev machines.
#
#######################
# _ is currently being used as the recipe namespace char; use `-` to separate words
# TODO this will be improved later on: https://github.com/casey/just/issues/2442
# `pipefail` is important: without this option, a shell script can easily hide an error in a way that is hard to debug.
# This will cause some extra frustration when developing scripts initially, but will make working with them more
# intuitive and less error prone over time.
# zsh is the default shell under macOS, so let's mirror it
set shell := ["zsh", "-cu", "-o", "pipefail"]
# TODO `-v` (cmd tracing) by default for [script]? it created weird terminal clearing behavior
# TODO can we force tracing and a custom PS4 prompt? Would be good to understand how Just handles echoing commands
# set script-interpreter := ["zsh", "-euvBh"]
# determines what shell to use for [script]
set script-interpreter := ["zsh", "-euBh", "-o", "pipefail"]
# avoid seeing comments in the output
set ignore-comments := true
# for [script] support
set unstable := true
# used for image name, op vault access, etc
PROJECT_NAME := "python-starter-template"
# execute a command in the (nearly) exact same environment as CI
EXECUTE_IN_TEST := "CI=true direnv exec ."
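# Illustrative usage (an assumption, mirroring how the test recipes below use it): prefix any command to run it
# with CI-equivalent settings, e.g. `{{EXECUTE_IN_TEST}} uv run pytest tests/integration`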
# the `exec` magic is to ensure `sys.stdout.isatty()` reports as false, which can change pytest plugin functionality
# exec 1> >(cat)
default:
just --list
lint: js_lint py_lint db_lint
# watches all important python files and automatically restarts the process if anything changes
PYTHON_WATCHMEDO := "uv run --with watchdog watchmedo auto-restart --directory=./ --pattern=*.py --recursive --"
# start all of the services you need for development in a single terminal
[macos]
[script]
dev: local-alias dev_kill
just _banner_echo "Starting dev services"
# TODO we should think about the worker command a bit more...should we use the same exact command? should we generate vs hardcode?
# create a tmp Procfile with all of the dev services we need running
cat << 'EOF' > tmp/Procfile.dev
py_dev: just py_dev
py_worker: {{PYTHON_WATCHMEDO}} $(yq '.worker' Procfile --output-format yaml)
py_scheduler: {{PYTHON_WATCHMEDO}} $(yq '.scheduler' Procfile --output-format yaml)
js_dev: just js_dev
openapi: just js_generate-openapi --watch
EOF
# foreman is abandoned, but it still works
# hivemind does not ignore terminal clear control sequences
# ultraman looks to have some obvious bugs
foreman start --root . --procfile=tmp/Procfile.dev
# kill all processes bound to server ports
[macos]
[script]
dev_kill:
just _banner_echo "Killing all processes bound to server ports"
for port in "$JAVASCRIPT_SERVER_PORT" "$PYTHON_SERVER_PORT" "$PYTHON_TEST_SERVER_PORT"; do
echo "Checking for processes on port $port"
pids=("${(@f)$(lsof -t -i :${port} 2>/dev/null || true)}")
if [[ -n "$pids" ]]; then
for pid in $pids; do
kill -9 "$pid"
echo "Killed process $pid on port $port"
done
else
echo "No processes found on port $port"
fi
done
#######################
# Utilities
#######################
# build commands generate a lot of output, and when `[script]` is used no commands are echoed; this lets us make build
# output easier to read in CI.
@_banner_echo BANNER:
# TODO use style tags from justfile
# two spaces added because of the '# ' prefix on the banner message
banner_length=$(echo -n "{{BANNER}} " | wc -c) && \
printf "\n\033[0;36m%${banner_length}s#\033[0m\n" | tr " " "#" && \
printf "\033[0;36m# %s \033[0m\n" "{{BANNER}}" && \
printf "\033[0;36m%${banner_length}s#\033[0m\n\n" | tr " " "#"
#######################
# Setup
#######################
# NOTE nixpacks is installed during the deployment step and not as a development prerequisite
BREW_PACKAGES := "fd entr 1password-cli yq jq"
EXTRA_BREW_PACKAGES := "lefthook peterldowns/tap/localias foreman"
[macos]
[script]
_brew_check_and_install brew_target:
if ! brew list {{brew_target}} > /dev/null; then
echo "{{brew_target}} is not installed. Installing..."
brew install {{brew_target}}
fi
# install all local development requirements not handled by `mise`
[macos]
[doc("--extras to install non-essential productivity tooling")]
requirements *flags:
@if ! which mise > /dev/null; then \
echo "mise is not installed. Please install."; \
echo " => https://mise.jdx.dev"; \
exit 1; \
fi
@if ! which docker > /dev/null; then \
echo "docker is not installed. Please install."; \
exit 1; \
fi
@for brew_package in {{BREW_PACKAGES}}; do \
just _brew_check_and_install $brew_package; \
done
@if [[ "{{flags}}" =~ "--extras" ]]; then \
echo "Adding aiautocommit..."; \
uv tool install aiautocommit; \
\
echo "Removing sample git hooks..."; \
rm .git/hooks/*.sample || true; \
\
echo "Installing git hooks..."; \
lefthook install; \
\
for brew_package in {{EXTRA_BREW_PACKAGES}}; do \
just _brew_check_and_install $brew_package; \
done; \
\
if ! which commitlint > /dev/null; then \
if ! cargo --list | grep -q binstall; then \
echo "cargo binstall not available, skipping commitlint installation"; \
else \
cargo binstall -y commitlint-rs; \
fi; \
fi; \
fi
# setup everything you need for local development
[macos]
setup: requirements && py_setup db_seed js_build
# NOTE this task should be non-destructive, the user should opt-in to something like `nuke`
# some reasoning behind the logic here:
#
# - js_build is required for running e2e tests on the server
@if [ ! -f .env.dev.local ]; then \
cp .env.dev.local-example .env.dev.local; \
echo "Please edit .env.dev.local to your liking."; \
fi
@if [ ! -f .env.local ]; then \
cp .env.local-example .env.local; \
echo "Please edit .env.local to your liking."; \
fi
@echo 'If you are using localias, run `just local-alias` to start the daemon'
# TODO extract to my personal dotfiles as well
# TODO should change the CURRENT_BASE for py and other x.x.y upgrades
[script]
[macos]
_mise_upgrade:
# Get current tools and versions from local .tool-versions only
TOOLS=("${(@f)$(mise list --current --json | jq -r --arg PWD "$PWD" 'to_entries | map(select(.value[0].source.path == $PWD + "/.tool-versions")) | from_entries | keys[]')}")
for TOOL in $TOOLS; do
# Get current version
CURRENT=$(mise list --current --json | jq -r --arg TOOL "$TOOL" --arg PWD "$PWD" 'to_entries | map(select(.value[0].source.path == $PWD + "/.tool-versions")) | from_entries | .[$TOOL][0].version')
echo "Current version of $TOOL: $CURRENT"
if [[ "$TOOL" == "node" || "$TOOL" == "python" ]]; then
# Extract major.minor version
CURRENT_BASE=$(echo "$CURRENT" | cut -d. -f1,2)
echo "Current base version of $TOOL: $CURRENT_BASE"
# Get latest version matching current major.minor
LATEST=$(mise ls-remote "$TOOL" | grep -E "^${CURRENT_BASE}\.[0-9]+$" | sort -V | tail -n1)
else
# Extract major version
CURRENT_BASE=$(echo "$CURRENT" | cut -d. -f1)
echo "Current base version of $TOOL: $CURRENT_BASE"
# Get latest version matching current major version
LATEST=$(mise ls-remote "$TOOL" | grep -E "^${CURRENT_BASE}\.[0-9.]+$" | sort -V | tail -n1)
fi
if [[ -n $LATEST && $CURRENT != $LATEST ]]; then
sed -i '' "s/^$TOOL .*/$TOOL $LATEST/" .tool-versions
echo "Updated $TOOL: $CURRENT -> $LATEST"
fi
done
# TODO https://discord.com/channels/1066429325269794907/1314301006992900117/1316773799688933406
mise install
just _mise_version_sync
git add .tool-versions
# sync the mise version to github actions yaml
[macos]
_mise_version_sync:
mise_version=$(mise --version | awk '{print $1}') && \
yq e '.runs.steps.0.with.version = "'$mise_version'"' .github/actions/common-setup/action.yml -i
git add .github/actions/common-setup/action.yml
# upgrade mise, language versions, and essential packages
[macos]
tooling_upgrade: && _mise_upgrade _js_sync-engine-versions
mise self-update
HOMEBREW_NO_AUTO_UPDATE=1 brew upgrade {{BREW_PACKAGES}}
# upgrade everything: all packages on all languages, tooling, etc
[macos]
upgrade: tooling_upgrade js_upgrade py_upgrade
uv run python -m app.cli write-versions
# run (or reload) daemon to setup local development aliases
[macos]
[script]
local-alias:
if [[ "$(localias status)" == "daemon running with pid "* ]]; then
just _banner_echo "Localias Daemon Already Running, Reloading"
localias reload
exit 0
fi
localias start
just _banner_echo "Local Alias Configuration"
localias debug config --print
clean: js_clean py_clean build_clean
rm -rf tmp/* || true
rm -rf .git/hooks/* || true
# destroy and rebuild py, js, db, etc
nuke: js_nuke py_nuke db_nuke
#######################
# Javascript
#######################
WEB_DIR := "web"
_pnpm := "cd " + WEB_DIR + " && pnpm ${PNPM_GLOBAL_FLAGS:-}"
# pnpm alias
pnpm +PNPM_CMD:
{{_pnpm}} {{PNPM_CMD}}
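# Illustrative examples of the alias (the package name here is hypothetical):
#   just pnpm add -D @types/lodash
#   just pnpm run test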
js_setup:
# frozen-lockfile is used on CI and when building for production, so we default to that mode
{{_pnpm}} install --frozen-lockfile
# TODO do we actually need this? Or will RR do this for us when building a preview + build?
{{_pnpm}} react-router typegen
js_clean:
rm -rf {{WEB_DIR}}/build {{WEB_DIR}}/client {{WEB_DIR}}/node_modules {{WEB_DIR}}/.react-router || true
# clean and rebuild
js_nuke: js_clean js_setup
js_lint +FILES=".":
# TODO support GITHUB_ACTIONS/CI formatting
{{_pnpm}} prettier --check {{FILES}}
# `eslint-config-typescript` seems dead
{{_pnpm}} eslint --cache --cache-location ./node_modules/.cache/eslint {{FILES}}
{{_pnpm}} dlx depcheck
# automatically fix linting errors
js_lint-fix:
{{_pnpm}} prettier --write .
{{_pnpm}} eslint --cache --cache-location ./node_modules/.cache/eslint . --fix
# run tests in the exact same environment that will be used on CI
js_test:
# NOTE vitest automatically will detect GITHUB_ACTIONS and change the output format
# CI=true impacts how various JS tooling run
if [[ -n "${CI:-}" ]]; then \
{{_pnpm}} run test; \
else \
cd {{WEB_DIR}} && {{EXECUTE_IN_TEST}} pnpm run test; \
fi
# run a development server
js_dev:
[[ -d {{WEB_DIR}}/node_modules ]] || just js_setup
{{_pnpm}} run dev
# build a production javascript bundle, helpful for running e2e python tests
js_build: js_setup
# NOTE this is *slightly* different than the production build: NODE_ENV != production and the ENV variables are different.
# This can cause build errors to occur via nixpacks, but not here.
#
# If you want to replicate a production environment, run: `just js_clean && export NODE_ENV=production && just js_build`
# to test building in an environment much closer to production. Node and pnpm versions can still be *slightly* different
# than your local environment since `mise` is not used within nixpacks.
# as you'd expect, the `web/build` directory is wiped on each run, so we don't need to clear it manually
export VITE_BUILD_COMMIT="{{GIT_SHA}}" && {{_pnpm}} run build
# interactive repl for testing ts
js_play:
# TODO this needs some work
{{_pnpm}} dlx tsx ./playground.ts
# interactively upgrade all js packages
js_upgrade:
{{_pnpm}} dlx npm-check-updates --interactive
# intentionally without lockfile so it's updated
{{_pnpm}} install
cd {{WEB_DIR}} && git add package.json pnpm-lock.yaml
# generate a typescript client from the openapi spec
[doc("Optional flag: --watch")]
js_generate-openapi *flag:
if {{ if flag == "--watch" { "true" } else { "false" } }}; then; \
fd --extension=py . | entr just _js_generate-openapi; \
else; \
just _js_generate-openapi; \
fi
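# Example usage: `just js_generate-openapi` for a one-off regeneration, or `just js_generate-openapi --watch` to
# regenerate whenever a python file changes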
_js_generate-openapi:
# jq is here to pretty print the output
LOG_LEVEL=error uv run python -m app.cli dump-openapi | jq -r . > "$OPENAPI_JSON_PATH"
# generate the js client with the latest openapi spec
{{_pnpm}} run openapi
# generated route types can depend on the openapi spec, so we need to regenerate them
{{_pnpm}} exec react-router typegen
# TODO watch js files
# react-router typegen
# safe-routes typegen
# full build for py e2e tests
# run shadcn commands with the latest library version
js_shadcn *arguments:
{{_pnpm}} dlx shadcn@latest {{arguments}}
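# Example (the component name is illustrative, see the shadcn docs for what's available):
#   just js_shadcn add button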
js_shadcn_upgrade:
just js_shadcn diff
JAVASCRIPT_PACKAGE_JSON := WEB_DIR / "package.json"
# update package.json engines to match the current versions in .tool-versions
[macos]
[script]
_js_sync-engine-versions:
NODE_VERSION=$(mise list --current --json | jq -r ".node[0].version")
PNPM_VERSION=$(pnpm -v)
# jq does not have edit in place
# https://stackoverflow.com/questions/36565295/jq-to-replace-text-directly-on-file-like-sed-i
tmp_package=$(mktemp)
# >= vs ^ or ~ can cause weird compatibility issues such as:
# https://community.render.com/t/issue-with-deploy/26570/7
# Always take a conservative approach with javascript system versions.
jq "
. + {
engines: {
node: \"^$NODE_VERSION\",
pnpm: \"^$PNPM_VERSION\"
}
}" "{{JAVASCRIPT_PACKAGE_JSON}}" > "$tmp_package"
mv "$tmp_package" "{{JAVASCRIPT_PACKAGE_JSON}}"
#######################
# Python
#######################
# this is used for jinja + HTML linting, if you put templates elsewhere, you'll need to update this
JINJA_TEMPLATE_DIR := "app/templates"
# create venv and install packages
py_setup:
[ -d ".venv" ] || uv venv
# don't include debugging-extras on CI
# --no-sources to allow local dev packages to be used: https://github.com/astral-sh/uv/issues/9258#issuecomment-2499541207
if [ -z "${CI:-}" ]; then \
uv sync --group=debugging-extras; \
else \
uv sync --no-sources; \
fi
# important for CI to install browsers for playwright
# the installation process is fast enough (<10s) to eliminate the need for attempting to cache via GHA
# if this turns out not to be true, we should implement: https://github.com/hirasso/thumbhash-custom-element/blob/main/.github/workflows/tests.yml
if [ -z "${CI:-}" ]; then \
uv run playwright install chromium; \
else \
uv run playwright install chromium --only-shell; \
fi
# clean entire py project without rebuilding
py_clean:
# pycache should never appear because of PYTHON* vars
rm -rf .pytest_cache .ruff_cache .venv celerybeat-schedule || true
rm -rf tests/**/snapshot_tests_failures || true
# rm -rf $PLAYWRIGHT_BROWSERS_PATH
# TODO should remove pnpm global cache: pnpm store path
# rm -rf $(pnpm store path)
# rebuild the venv from scratch
py_nuke: py_clean && py_setup
# reload will recreate the venv and reset VIRTUAL_ENV and friends
direnv reload
py_upgrade:
# https://github.com/astral-sh/uv/issues/6794
uv sync -U --group=debugging-extras
uv tool upgrade --all
git add pyproject.toml uv.lock
# open up a development server
py_dev:
PORT=$PYTHON_SERVER_PORT uv run python main.py
py_play:
./playground.py
# TODO should have additional tool for workers and all server processes
# run all linting operations and fail if any fail
[script]
py_lint +FILES=".":
# + indicates one or more arguments being required in Justfile syntax
# NOTE this is important: we want all operations to run instead of fail fast
set +e
# TODO we should either abstract this out or remove it...
# Define a more detailed colored PS4 without current directory so -x output is easier to read
setopt prompt_subst
export PS4='%F{green}+%f '
set -x
if [ -n "${CI:-}" ]; then
# TODO I'm surprised that ruff doesn't auto detect github... need to double check on this
uv tool run ruff check --output-format=github {{FILES}} || exit_code=$?
uv tool run ruff format --check {{FILES}} || exit_code=$?
uv run pyright {{FILES}} --outputjson > pyright_report.json || exit_code=$?
# TODO this is a neat trick, we should use it in other places too + document
# https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#setting-a-warning-message
# https://github.com/jakebailey/pyright-action/blob/b7d7f8e5e5f195796c6f3f0b471a761a115d3b2c/src/main.ts#L62
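# for reference, each pyright diagnostic is emitted as a workflow-command line shaped roughly like this
# (the path and message below are hypothetical):
#   ::error file=app/models/user.py,line=10,endLine=10,col=4,endColumn=12::"foo" is not defined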
jq -r '.generalDiagnostics[] | "::\(.severity) file=\(.file),line=\(.range.start.line),endLine=\(.range.end.line),col=\(.range.start.character),endColumn=\(.range.end.character)::\(.message)"' < pyright_report.json
rm pyright_report.json
# check jinja2 template language
uv run j2lint --extension j2,html {{JINJA_TEMPLATE_DIR}} --json > j2link_report.json || exit_code=$?
jq -r '(.ERRORS[] | "::\(if .severity == "HIGH" then "error" else "warning" end) file=\(.filename),line=\(.line_number),title=\(.id)::\(.message)"), (.WARNINGS[] | "::warning file=\(.filename),line=\(.line_number),title=\(.id)::\(.message)")' < j2link_report.json
rm j2link_report.json
else
uv tool run ruff check {{FILES}} || exit_code=$?
uv run pyright {{FILES}} || exit_code=$?
uv run j2lint --extension j2,html {{JINJA_TEMPLATE_DIR}}
fi
# TODO should only run if {{FILES}} contains a template
# NOTE djlint does *not* check jinja syntax, only HTML. GH friendly output is automatically enabled.
uv run djlint {{JINJA_TEMPLATE_DIR}} --profile=jinja
# TODO right now, this tool doesn't work with manual maps :/
# TODO https://github.com/fpgmaas/deptry/issues/610#issue-2190147786
# TODO https://github.com/fpgmaas/deptry/issues/740
# uv tool run deptry --experimental-namespace-package . || exit_code=$?
if [[ -n "${exit_code:-}" ]]; then
echo "One or more commands failed"
exit 1
fi
# automatically fix linting errors
py_lint_fix:
uv tool run ruff check . --fix
uv tool run ruff format
uv run djlint --profile=jinja --reformat {{JINJA_TEMPLATE_DIR}}
# NOTE pyright and other linters do not have an automatic fix flow
# build js for py e2e tests
py_js-build:
# integration tests should mimic production as closely as possible
# to do this, we build the app and serve it like it will be served in production
export PNPM_GLOBAL_FLAGS="--silent" && {{EXECUTE_IN_TEST}} just js_build
PYTEST_COV_PARAMS := "--cov --cov-report=html:${TEST_RESULTS_DIRECTORY}/htmlcov --cov-report=term"
# run tests with the exact same environment that will be used on CI
[script]
py_test:
# Define a more detailed colored PS4 without current directory so -x output is easier to read
setopt prompt_subst
export PS4='%F{green}+%f '
set -x
just _banner_echo "Building Javascript for Integration Tests"
just py_js-build
# TODO we don't need to see all of the details for this part of the build, since we are primarily testing javascript
# TODO I wonder if I could make EXECUTE_IN_TEST blank if in the test environment...
# NOTE unfortunately, because of the asyncio loop + playwright, we need to run the playwright integration tests separately
if [[ -n "${CI:-}" ]]; then
just _banner_echo "Running Non-Integration Tests"
uv run pytest . --ignore tests/integration {{PYTEST_COV_PARAMS}}
just _banner_echo "Running Integration"
uv run pytest tests/integration --cov-append {{PYTEST_COV_PARAMS}}
else
{{EXECUTE_IN_TEST}} uv run pytest . --ignore tests/integration {{PYTEST_COV_PARAMS}}
{{EXECUTE_IN_TEST}} uv run pytest tests/integration --cov-append {{PYTEST_COV_PARAMS}}
fi
# open playwright trace viewer on last trace zip. --remote to download last failed remote trace
[macos]
py_playwright_trace remote="":
mkdir -p ${PLAYWRIGHT_RESULT_DIRECTORY}
# helpful to download to a unique folder for two reasons: (a) easier to match up to the web GHA view and (b) eliminates the risk of gh-cli erroring out because the directory already exists
if [ "{{remote}}" = "--remote" ]; then \
failed_run_id=$(just _gha_last_failed_run_id) && \
mkdir -p ${PLAYWRIGHT_RESULT_DIRECTORY}/${failed_run_id} && \
gh run --dir ${PLAYWRIGHT_RESULT_DIRECTORY}/${failed_run_id} download $failed_run_id; \
fi
# NOTE it's insane, but fd does not have a "find last modified file"
# https://github.com/sharkdp/fd/issues/196
uv run playwright show-trace $(fd --no-ignore-vcs . ${PLAYWRIGHT_RESULT_DIRECTORY} -e zip -t f --exec-batch stat -f '%m %N' | sort -n | tail -1 | cut -f2- -d" ")
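# Examples:
#   just py_playwright_trace            # open the most recently written local trace
#   just py_playwright_trace --remote   # download the last failed CI run's artifacts first, then open the trace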
# record playwright interactions for integration tests and dump them to a file
[macos]
[script]
py_playwright-record:
mkdir -p tmp/playwright
recorded_interaction=tmp/playwright/$(date +%m-%d-%s).py
uv run playwright codegen \
--target python-pytest \
--output $recorded_interaction \
https://${JAVASCRIPT_SERVER_HOST}
echo $recorded_interaction
pbcopy < $recorded_interaction
# open mailpit web ui, helpful for inspecting emails
py_mailpit_open:
open "https://$(echo $SMTP_URL | cut -d'/' -f3 | cut -d':' -f1)"
#######################
# CI Management
#######################
GHA_YML_NAME := "build_and_publish.yml"
# TODO should scope to the current user's runs
# rerun last failed CI run
ci_rerun:
gh run rerun $(just _gha_last_failed_run_id)
# view the last failed gha in the browser
ci_view-last-failed:
gh run view --web $(just _gha_last_failed_run_id)
# TODO output here is still messy, may be able to customize with --template
# tail failed logs right in your terminal
ci_tail-last-failed:
gh run view --log-failed $(just _gha_last_failed_run_id)
# live tail currently running ci job
ci_watch-running *flag:
if {{ if flag == "--web" { "true" } else { "false" } }}; then \
gh run view --web $(just _gha_running_run_id); \
else \
gh run watch $(just _gha_running_run_id); \
fi
# very destructive action: deletes all workflow run logs
[confirm('Are you sure you want to delete all workflow logs?')]
ci_wipe_run_logs:
REPO=$(gh repo view --json name --jq '.name') && \
OWNER=$(gh repo view --json owner --jq '.owner.login') && \
gh api repos/$OWNER/$REPO/actions/workflows --paginate --jq '.workflows[] | .id' | \
xargs -I{} gh api repos/$OWNER/$REPO/actions/workflows/{}/runs --paginate --jq '.workflow_runs[].id' | \
xargs -I{} gh api -X DELETE /repos/$OWNER/$REPO/actions/runs/{}
# get the last failed run ID
_gha_last_failed_run_id:
# NOTE this is tied to the name of the yml!
gh run list --status=failure --workflow={{GHA_YML_NAME}} --json databaseId --jq '.[0].databaseId'
_gha_running_run_id:
gh run list --status=in_progress --workflow={{GHA_YML_NAME}} --json databaseId --jq '.[0].databaseId'
##########################
# Dev Container Management
##########################
# Use --fast to avoid waiting until the containers are healthy, useful for CI runs
[doc("Optional flag: --fast")]
up *flag:
# if images have already been pulled, this ensures the latest versions are pulled so they match with
# CI or other environments that are pulling fresh versions of the images
docker compose pull
docker compose up -d {{ if flag == "--fast" { "" } else { "--wait" } }}
down: db_down
docker compose down
# separate task for the db to support db_reset
db_up:
docker compose up -d --wait postgres
# TODO may need to run `docker rm $(docker ps -aq)` as well
# TODO docker down does not exit 1 if it partially failed
# turn off the database *and* completely remove the data
db_down:
docker compose down --volumes postgres
##############################################
# Database Migrations
#
# Goal is to have similar semantics to rails.
##############################################
# completely destroy the dev and test databases, destroying the containers and rebuilding them
db_reset_hard: db_down db_up db_migrate db_seed
# NOTE migration & seed are intentionally omitted so db_nuke and friends can run
# destroys all data in the dev and test databases, leaves the containers running
db_reset:
psql $DATABASE_URL -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;"
psql $TEST_DATABASE_URL -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;"
db_lint:
uv run alembic check
# TODO there's also a more advanced github integration, but this seems a bit cleaner:
# https://squawkhq.com/docs/github_app
# TODO don't fail on warnings https://github.com/sbdchd/squawk/issues/348
# TODO remove rule exclusion when https://github.com/sbdchd/squawk/issues/392 is fixed
# TODO should submit upstream for the jq transformations so others can copy, add to docs
if [ -n "${CI:-}" ]; then \
LOG_LEVEL=error uv run alembic upgrade head --sql | \
uv run squawk --reporter=json --exclude=prefer-text-field | \
jq -r '.[] | "::\(if .level == "Error" then "error" else "warning" end) file=\(.file),line=\(.line),col=\(.column),title=\(.rule_name)::\(.messages[0].Note)"'; \
else \
LOG_LEVEL=error uv run alembic upgrade head --sql | uv run squawk --exclude=prefer-text-field; \
fi
# open the database in the default macos GUI
[macos]
db_open:
# TablePlus via Setapp is a great option here
open $DATABASE_URL
# tui to interact with the database
[macos]
db_play:
uv tool run pgcli $DATABASE_URL
# run migrations on dev and test
db_migrate:
# if this folder is wiped, you'll get a strange error from alembic
mkdir -p migrations/versions
# dev database is created automatically, but test database is not. We need to fail gracefully when the database already exists.
psql $DATABASE_URL -c "CREATE DATABASE ${TEST_DATABASE_NAME};" || true
@just _banner_echo "Migrating Database"
uv run alembic upgrade head
[ -n "${CI:-}" ] || (just _banner_echo "Migrating Test Database" && {{EXECUTE_IN_TEST}} uv run alembic upgrade head)
# TODO should pick versions
# alembic history | fzf
# db_down:
# uv run alembic downgrade
# add seed data to dev and test
db_seed: db_migrate
@just _banner_echo "Seeding Database"
uv run python migrations/seed.py
[ -n "${CI:-}" ] || (just _banner_echo "Seeding Test Database" && {{EXECUTE_IN_TEST}} uv run python migrations/seed.py)
# TODO you can't preview what the migration will look like before naming it?
# generate migration based on the current state of the database
[script]
db_generate_migration migration_name="":
if [ -z "{{migration_name}}" ]; then
echo "Enter the migration name (use add/remove/update prefix): "
read name
else
name={{migration_name}}
fi
# underscores & alpha chars only
name=$(echo "$name" | tr ' ' '_' | tr '-' '_' | tr -cd '[:alnum:]_')
uv run alembic revision --autogenerate -m "$name"
just _banner_echo "Migration Generated. Run 'just db_migrate' to apply the migration"
# destroy and rebuild the database from the ground up, without mutating migrations
db_destroy: db_reset db_migrate db_seed
# rm migrations and regenerate: only for use in early development
db_nuke:
# I personally hate having a nearly-greenfield project with a bunch of migrations from DB schema iteration.
# This should only be used *before* you've launched to prod and don't need proper migration support
# first, wipe all of the existing migrations
rm -rf migrations/versions/* || true
just db_reset
just db_generate_migration "initial_commit"
# enable SQL debugging on the postgres database
[macos]
db_debug:
docker compose exec postgres \
psql -U $POSTGRES_USER -c "ALTER SYSTEM SET log_statement = 'all'; SELECT pg_reload_conf();"
[macos]
db_debug_off:
docker compose exec postgres \
psql -U $POSTGRES_USER -c "ALTER SYSTEM SET log_statement = 'none'; SELECT pg_reload_conf();"
#######################
# Secrets
#######################
_secrets_service-token CONTEXT WRITE_PERMISSION="false":
# if OP_SERVICE_ACCOUNT_TOKEN is set, the service-account API will not work
unset OP_SERVICE_ACCOUNT_TOKEN && \
write_permission=$([[ "{{WRITE_PERMISSION}}" == "true" ]] && echo ",write_items" || echo "") && \
op service-account create {{PROJECT_NAME}}-{{CONTEXT}} \
--expires-in '90d' \
--vault "${OP_VAULT_UID}:read_items${write_permission}" \
--raw
# for terraform and other tools which can create entries
[macos]
secrets_write-service-token:
just _secrets_service-token write true | jq -r -R '@sh "export OP_SERVICE_ACCOUNT_TOKEN=\(.)"'
# generate service account token to be used locally for a developer
[macos]
secrets_local-service-token user=`whoami`:
just _secrets_service-token {{user}} | jq -r -R '@sh "export OP_SERVICE_ACCOUNT_TOKEN=\(.)"'
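# These recipes print an `export OP_SERVICE_ACCOUNT_TOKEN=...` line; one assumed usage pattern is to eval it into
# your current shell or append it to your local env file:
#   eval "$(just secrets_local-service-token)"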
# grant GH actions access to the 1p vault, this needs to be done every 90d
[macos]
secrets_ci_grant-github-actions:
# 90d is the max expiration time allowed
# this can be safely run multiple times, it will not regenerate the service account token
service_account_token=$(just _secrets_service-token github-actions) && \
gh secret set OP_SERVICE_ACCOUNT_TOKEN --app actions --body "$service_account_token"
# manage the op service account from the web ui
[macos]
secrets_ci_manage:
# you cannot revoke/delete a service account with the cli, you must log in and delete it from the web ui
open https://$OP_ACCOUNT/developer-tools/directory
#######################
# GitHub
#######################
GITHUB_RULESET := """
{
"name": "Protect master from force pushes",
"target": "branch",
"enforcement": "active",
"conditions": {
"ref_name": {
"include": ["refs/heads/master"],
"exclude": []
}
},
"rules": [
{
"type": "non_fast_forward"
}
]
}
"""
_github_repo:
gh repo view --json nameWithOwner -q .nameWithOwner
github_ruleset_delete:
repo=$(just _github_repo) && \
ruleset_name=$(echo '{{GITHUB_RULESET}}' | jq -r .name) && \
ruleset_id=$(gh api repos/$repo/rulesets --jq ".[] | select(.name == \"$ruleset_name\") | .id") && \
(([ -n "${ruleset_id}" ] || (echo "No ruleset found" && exit 0)) || gh api --method DELETE repos/$repo/rulesets/$ruleset_id)
github_ruleset: github_ruleset_delete
gh api --method POST repos/$(just _github_repo)/rulesets --input - <<< '{{GITHUB_RULESET}}'
#######################
# Production Build
#
# Some of the ENV variables and labels below are pulled from these projects:
#
# - https://github.com/iloveitaly/github-action-nixpacks/blob/2ad8c4fab7059ede8b6103f17b2ec23f42961fd9/entrypoint.sh
# - https://devcenter.heroku.com/articles/dyno-metadata
#
#######################
GIT_DIRTY := `if [ -n "$(git status --porcelain)" ]; then echo "-dirty"; fi`
GIT_SHA := `git rev-parse HEAD` + GIT_DIRTY
GIT_DESCRIPTION := `git log -1 --pretty=%s`
BUILD_CREATED_AT := `date -u +%FT%TZ`
NIXPACKS_BUILD_METADATA := (
'-e BUILD_COMMIT="' + GIT_SHA + '" ' +
'-e BUILD_DESCRIPTION="' + GIT_DESCRIPTION + '" ' +
'-e BUILD_CREATED_AT="' + BUILD_CREATED_AT + '" '
)
# NOTE production secrets are *not* included in the image, they are set on deploy
PYTHON_NIXPACKS_BUILD_CMD := "nixpacks build ." + \
" --name " + PYTHON_IMAGE_TAG + \
" " + NIXPACKS_BUILD_METADATA + \
" --env PYTHON_ENV=production" + \
" --platform=linux/amd64" + \
" $(just direnv_export_docker '" + SHARED_ENV_FILE +"' --params)" + \
" --inline-cache --cache-from " + PYTHON_PRODUCTION_IMAGE_NAME + ":latest" + \
" --label org.opencontainers.image.revision='" + GIT_SHA + "'" + \
" --label org.opencontainers.image.created='" + BUILD_CREATED_AT + "'" + \
' --label org.opencontainers.image.source="$(just _repo_url)"' + \
' --label org.opencontainers.image.description="Primary application deployment image"' + \
' --label build.run_id="$(just _build_id)"'
# Production assets bundle public "secrets" (safe to expose publicly) which are extracted from the environment.
# For this reason, we need to emulate the production environment, then build the assets statically.
# Also, we can't just mount /app/build/server with -v since the build process removes the entire /app/build directory.
# Some ENV vars are set for us, like NODE_ENV: https://nixpacks.com/docs/providers/node#environment-variables
JAVASCRIPT_NIXPACKS_BUILD_CMD := "nixpacks build " + WEB_DIR + " " + \
" --name " + JAVASCRIPT_IMAGE_TAG + " " + \
" " + NIXPACKS_BUILD_METADATA + \
" --platform=linux/amd64 " + \
" --env VITE_BUILD_COMMIT=" + GIT_SHA + " " + \
" --cache-from " + JAVASCRIPT_PRODUCTION_IMAGE_NAME + ":latest --inline-cache" + \
" $(just direnv_export_docker '" + JAVASCRIPT_SECRETS_FILE + "' --params) " + \
" $(just direnv_export_docker '" + SHARED_ENV_FILE + "' --params) " + \
" --label org.opencontainers.image.description=\"Used for building javascript assets, not for deployment\""
# .env file without any secrets that should exist on all environments
SHARED_ENV_FILE := ".env"
# .env file with production variables, no secrets, for python
PYTHON_PRODUCTION_ENV_FILE := ".env.production.backend"
# .env file with production variables that are safe to share publicly (frontend)
JAVASCRIPT_SECRETS_FILE := ".env.production.frontend"
# by default, the py image name is pulled from the project name
PYTHON_IMAGE_NAME := PROJECT_NAME
PYTHON_IMAGE_TAG := PYTHON_IMAGE_NAME + ":" + GIT_SHA
# the js image is not deployed and is only used during build, so we simply add a -javascript suffix
JAVASCRIPT_IMAGE_NAME := PYTHON_IMAGE_NAME + "-javascript"
JAVASCRIPT_IMAGE_TAG := JAVASCRIPT_IMAGE_NAME + ":" + GIT_SHA
PYTHON_PRODUCTION_IMAGE_NAME := "ghcr.io/iloveitaly/python-starter-template"
JAVASCRIPT_PRODUCTION_IMAGE_NAME := PYTHON_PRODUCTION_IMAGE_NAME + "-javascript"
[script]
_production_build_assertions:
# TODO we should abstract out "IS_CI" to some sort of Justfile check :/
# only run this on CI
[ ! -z "${CI:-}" ] || exit 0
# if the workspace is dirty, some configuration is not correct: we want a completely clean build environment
if [ ! -z "{{GIT_DIRTY}}" ]; then \
echo "Git workspace is dirty! This should never happen on prod!" >&2; \
git status; \
exit 1; \
fi
if [ ! -d "{{JINJA_TEMPLATE_DIR}}" ]; then \
echo "Jinja template directory does not exist! This should never happen on prod" >&2; \
exit 1; \
fi
# within nixpacks, this is where the SPA client assets are built
JAVASCRIPT_CONTAINER_BUILD_DIR := "/app/build/client"
# outside of nixpacks, within the python application folder, this is where the SPA assets are stored
JAVASCRIPT_PRODUCTION_BUILD_DIR := "public"
# build the javascript assets by creating an image, building assets inside the container, and then copying them to the host
build_javascript: _production_build_assertions
@just _banner_echo "Building JavaScript Assets in Container..."
rm -rf "{{JAVASCRIPT_PRODUCTION_BUILD_DIR}}" || true
{{JAVASCRIPT_NIXPACKS_BUILD_CMD}}
@just _banner_echo "Extracting JavaScript Assets from Container..."
# Cannot extract files out of an image, only a container. We create a tmp container to extract assets.
docker rm tmp-js-container || true
docker create --name tmp-js-container {{JAVASCRIPT_IMAGE_TAG}}
docker cp tmp-js-container:/app/build/production/client "{{JAVASCRIPT_PRODUCTION_BUILD_DIR}}"
# dump nixpacks-generated Dockerfile for manual build and production debugging
build_javascript_dump:
{{JAVASCRIPT_NIXPACKS_BUILD_CMD}} --out web
# support non-macos installations for github actions
_build_requirements:
@if ! which nixpacks > /dev/null; then \
echo "nixpacks is not installed. Installing...."; \
{{ if os() == "macos" { "brew install nixpacks" } else { "curl -sSL https://nixpacks.com/install.sh | bash" } }}; \
fi
# url of the repo on github for build metadata
@_repo_url:
gh repo view --json url --jq ".url" | tr -d " \n"
# unique ID (mostly) to identify where/when this image was built for docker labeling
@_build_id:
if [ -z "${GITHUB_RUN_ID:-}" ]; then \
echo "{{ os() }}-$(whoami)"; \
else \
echo "$GITHUB_RUN_ID"; \
fi
# build the docker container using nixpacks
build: _build_requirements _production_build_assertions build_javascript
@just _banner_echo "Building Python Image..."
{{PYTHON_NIXPACKS_BUILD_CMD}}
build_push: _production_build_assertions
# JS image is not used in prod, but is used for nixpacks caching, so we push to the registry
docker tag {{PYTHON_IMAGE_TAG}} {{PYTHON_PRODUCTION_IMAGE_NAME}}:{{GIT_SHA}}
docker push {{PYTHON_PRODUCTION_IMAGE_NAME}}:{{GIT_SHA}}
docker tag {{PYTHON_IMAGE_TAG}} {{PYTHON_PRODUCTION_IMAGE_NAME}}:latest
docker push {{PYTHON_PRODUCTION_IMAGE_NAME}}:latest
docker tag {{JAVASCRIPT_IMAGE_TAG}} {{JAVASCRIPT_PRODUCTION_IMAGE_NAME}}:latest
docker push {{JAVASCRIPT_PRODUCTION_IMAGE_NAME}}:latest