diff --git a/docs/aleph.md b/docs/aleph.md index d93c5c408d3..e1bab36c6e6 100644 --- a/docs/aleph.md +++ b/docs/aleph.md @@ -74,6 +74,19 @@ docker attach aleph_api_1 You don't need to reload the page for it to load the changes, it does it dynamically. +## Troubleshooting + +### Problems accessing redis locally + +If you're with the VPN connected, turn it off. + +### PDB behaves weird + +Sometimes you have two traces at the same time, so each time you run a PDB +command it jumps from pdb trace. Quite confusing. Try to `c` the one you don't +want so that you're left with the one you want. Or put the `pdb` trace in a +conditional that only matches one of both threads. + # References - [Docs](http://docs.alephdata.org/) diff --git a/docs/coding/python/gitpython.md b/docs/coding/python/gitpython.md index 4724d229201..13dd61fa200 100644 --- a/docs/coding/python/gitpython.md +++ b/docs/coding/python/gitpython.md @@ -30,7 +30,7 @@ pip install GitPython ```python from git import Repo -repo = Repo.init('path/to/initialize') +repo = Repo.init("path/to/initialize") ``` If you want to get the working directory of the `Repo` object use the @@ -41,11 +41,10 @@ If you want to get the working directory of the `Repo` object use the ```python from git import Repo -repo = Repo('existing/git/repo/path') +repo = Repo("existing/git/repo/path") ``` -## [Clone -a repository](https://stackoverflow.com/questions/2472552/python-way-to-clone-a-git-repository) +## [Clone a repository](https://stackoverflow.com/questions/2472552/python-way-to-clone-a-git-repository) ```python from git import Repo @@ -61,9 +60,10 @@ Given a `repo` object: index = repo.index # add the changes -index.add(['README.md']) +index.add(["README.md"]) from git import Actor + author = Actor("An author", "author@example.com") committer = Actor("A committer", "committer@example.com") # commit by commit message and author and committer @@ -79,7 +79,7 @@ time is useful. import datetime from dateutil import tz -commit_date = datetime.datetime(2020, 2, 2, tzinfo=tz.tzlocal()), +commit_date = (datetime.datetime(2020, 2, 2, tzinfo=tz.tzlocal()),) index.commit( "my commit message", @@ -111,23 +111,21 @@ the log. It gives you a List of commits where the first element is the last commit in time. - ### Inspect the log Inspect it with the `repo.head.reference.log()`, which contains a list of `RefLogEntry` objects that have the interesting attributes: -* `actor`: Actor object of the author of the commit -* `time`: The commit timestamp, to load it as a datetime object use the - `datetime.datetime.fromtimestamp` method -* `message`: Message as a string. +- `actor`: Actor object of the author of the commit +- `time`: The commit timestamp, to load it as a datetime object use the + `datetime.datetime.fromtimestamp` method +- `message`: Message as a string. -## [Create -a branch](https://gitpython.readthedocs.io/en/stable/tutorial.html#advanced-repo-usage) +## [Create a branch](https://gitpython.readthedocs.io/en/stable/tutorial.html#advanced-repo-usage) ```python -new_branch = repo.create_head('new_branch') -assert repo.active_branch != new_branch # It's not checked out yet +new_branch = repo.create_head("new_branch") +assert repo.active_branch != new_branch # It's not checked out yet repo.head.reference = new_branch assert not repo.head.is_detached ``` @@ -149,29 +147,29 @@ Create a `test_data` directory in your testing directory with the contents of the git repository you want to test. Don't initialize it, we'll create a `repo` fixture that does it. 
Assuming that the data is in `tests/assets/test_data`: -!!! note "File: tests/conftest.py" - ```python - import shutil +File `tests/conftest.py`: - import pytest - from git import Repo - from py._path.local import LocalPath +```python +import shutil +import pytest +from git import Repo - @pytest.fixture(name="repo") - def repo_(tmpdir: LocalPath) -> Repo: - """Create a git repository with fake data and history. - Args: - tmpdir: Pytest fixture that creates a temporal directory - """ - # Copy the content from `tests/assets/test_data`. - repo_path = tmpdir / "test_data" - shutil.copytree("tests/assets/test_data", repo_path) +@pytest.fixture(name="repo") +def repo_(tmp_path: Path) -> Repo: + """Create a git repository with fake data and history. - # Initializes the git repository. - return Repo.init(repo_path) - ``` + Args: + tmp_path: Pytest fixture that creates a temporal Path + """ + # Copy the content from `tests/assets/test_data`. + repo_path = tmp_path / "test_data" + shutil.copytree("tests/assets/test_data", repo_path) + + # Initializes the git repository. + return Repo.init(repo_path) +``` On each test you can add the commits that you need for your use case. @@ -179,6 +177,7 @@ On each test you can add the commits that you need for your use case. author = Actor("An author", "author@example.com") committer = Actor("A committer", "committer@example.com") + @pytest.mark.freeze_time("2021-02-01T12:00:00") def test_repo_is_not_empty(repo: Repo) -> None: commit_date = datetime.datetime(2021, 2, 1, tzinfo=tz.tzlocal()) @@ -195,44 +194,44 @@ def test_repo_is_not_empty(repo: Repo) -> None: ``` If you feel that the tests are too verbose, you can create a fixture with all -the commits done, and select each case with the [freezegun pytest -fixture](pytest.md#freezegun). In my opinion, it will make the tests less clear -though. The fixture can look like: - -!!! note "File: tests/conftest.py" - ```python - import datetime - from dateutil import tz - import shutil - import textwrap - - import pytest - from git import Actor, Repo - from py._path.local import LocalPath - - - @pytest.fixture(name="full_repo") - def full_repo_(repo: Repo) -> Repo: - """Create a git repository with fake data and history. - - Args: - repo: an initialized Repo - """ - index = repo.index - author = Actor("An author", "author@example.com") - committer = Actor("A committer", "committer@example.com") - - # Add a commit in time - commit_date = datetime.datetime(2021, 2, 1, tzinfo=tz.tzlocal()) - index.add(["mkdocs.yml"]) - index.commit( - "Initial skeleton", - author=author, - committer=committer, - author_date=commit_date, - commit_date=commit_date, - ) - ``` +the commits done, and select each case with the +[freezegun pytest fixture](pytest.md#freezegun). In my opinion, it will make the +tests less clear though. The fixture can look like: + +File: `tests/conftest.py`: + +```python +import datetime +from dateutil import tz +import shutil +import textwrap + +import pytest +from git import Actor, Repo + + +@pytest.fixture(name="full_repo") +def full_repo_(repo: Repo) -> Repo: + """Create a git repository with fake data and history. 
+ + Args: + repo: an initialized Repo + """ + index = repo.index + author = Actor("An author", "author@example.com") + committer = Actor("A committer", "committer@example.com") + + # Add a commit in time + commit_date = datetime.datetime(2021, 2, 1, tzinfo=tz.tzlocal()) + index.add(["mkdocs.yml"]) + index.commit( + "Initial skeleton", + author=author, + committer=committer, + author_date=commit_date, + commit_date=commit_date, + ) +``` Then you can use that fixture in any test: @@ -244,6 +243,6 @@ def test_assert_true(full_repo: Repo) -> None: # References -* [Docs](https://gitpython.readthedocs.io) -* [Git](https://github.com/gitpython-developers/GitPython) -* [Tutorial](https://gitpython.readthedocs.io/en/stable/tutorial.html#tutorial-label) +- [Docs](https://gitpython.readthedocs.io) +- [Git](https://github.com/gitpython-developers/GitPython) +- [Tutorial](https://gitpython.readthedocs.io/en/stable/tutorial.html#tutorial-label) diff --git a/docs/coding/python/pytest.md b/docs/coding/python/pytest.md index b7c3688d99c..39be78a221c 100644 --- a/docs/coding/python/pytest.md +++ b/docs/coding/python/pytest.md @@ -10,18 +10,17 @@ applications and libraries. Pytest stands out over other test frameworks in: -* Simple tests are simple to write in pytest. -* Complex tests are still simple to write. -* Tests are easy to read. -* You can get started in seconds. -* You use `assert` to fail a test, not things like `self.assertEqual()` or - `self.assertLessThan()`. Just `assert`. -* You can use pytest to run tests written for unittest or nose. - -!!! note "" - You can use [this cookiecutter - template](https://github.com/lyz-code/cookiecutter-python-project) to create - a python project with `pytest` already configured. +- Simple tests are simple to write in pytest. +- Complex tests are still simple to write. +- Tests are easy to read. +- You can get started in seconds. +- You use `assert` to fail a test, not things like `self.assertEqual()` or + `self.assertLessThan()`. Just `assert`. +- You can use pytest to run tests written for unittest or nose. + +Note: You can use +[this cookiecutter template](https://github.com/lyz-code/cookiecutter-python-project) +to create a python project with `pytest` already configured. # Install @@ -42,50 +41,51 @@ If you need more information run it with `-v`. Pytest automatically finds which tests to run in a phase called *test discovery*. It will get the tests that match one of the following conditions: -* Test files that are named `test_{{ something }}.py` or `{{ something }}_test.py`. -* Test methods and functions named `test_{{ something }}`. -* Test classes named `Test{{ Something }}`. +- Test files that are named `test_{{ something }}.py` or + `{{ something }}_test.py`. +- Test methods and functions named `test_{{ something }}`. +- Test classes named `Test{{ Something }}`. There are several possible outcomes of a test function: -* *PASSED (.)*: The test ran successfully. -* *FAILED (F)*: The test did not run usccessfully (or *XPASS* + strict). -* *SKIPPED (s)*: The test was skipped. You can tell pytest to skip a test by - using enter the `@pytest.mark.skip()` or `pytest.mark.skipif()` decorators. -* *xfail (x)*: The test was not supposed to pass, ran, and failed. You can tell - pytest that a test is expected to fail by using the `@pytest.mark.xfail()` - decorator. -* *XPASS (X)*: The tests was not supposed to pass, ran, and passed. -* *ERROR (E)*: An exception happened outside of the test function, in either - a fixture or a hook function. 
+- *PASSED (.)*: The test ran successfully.
+- *FAILED (F)*: The test did not run successfully (or *XPASS* + strict).
+- *SKIPPED (s)*: The test was skipped. You can tell pytest to skip a test by
+  using the `@pytest.mark.skip()` or `pytest.mark.skipif()` decorators.
+- *xfail (x)*: The test was not supposed to pass, ran, and failed. You can tell
+  pytest that a test is expected to fail by using the `@pytest.mark.xfail()`
+  decorator.
+- *XPASS (X)*: The test was not supposed to pass, ran, and passed.
+- *ERROR (E)*: An exception happened outside of the test function, in either a
+  fixture or a hook function.

 Pytest supports several cool flags like:

-* `-k EXPRESSION`: Used to select a subset of tests to run. For example `pytest
-  -k "asdict or defaults"` will run both `test_asdict()` and
-  `test_defaults()`.
-* `--lf` or `--last-failed`: Just run the tests that have failed in the previous
-  run.
-* `-x`, or `--exitfirst`: Exit on first failed test.
-* `-l` or `--showlocals`: Print out the local variables in a test if the test
-  fails.
-* `-s` Allows any output that normally would be printed to `stdout` to actually
-  be printed to `stdout`. It's an alias of `--capture=no`, so the output is
-  not captured when the tests are run, which is the default behavior. This is
-  useful to debug with `print()` statements.
-* `--durations=N`: It reports the slowest `N` number of tests/setups/teardowns
-  after the test run. If you pass in `--durations=0`, it reports everything in
-  order of slowest to fastest.
-* `--setup-show`: Show the fixtures in use.
+- `-k EXPRESSION`: Used to select a subset of tests to run. For example
+  `pytest -k "asdict or defaults"` will run both `test_asdict()` and
+  `test_defaults()`.
+- `--lf` or `--last-failed`: Just run the tests that have failed in the previous
+  run.
+- `-x`, or `--exitfirst`: Exit on first failed test.
+- `-l` or `--showlocals`: Print out the local variables in a test if the test
+  fails.
+- `-s` Allows any output that normally would be printed to `stdout` to actually
+  be printed to `stdout`. It's an alias of `--capture=no`, so the output is not
+  captured when the tests are run, which is the default behavior. This is useful
+  to debug with `print()` statements.
+- `--durations=N`: It reports the slowest `N` number of tests/setups/teardowns
+  after the test run. If you pass in `--durations=0`, it reports everything in
+  order of slowest to fastest.
+- `--setup-show`: Show the fixtures in use.

 # Fixtures

 Fixtures are functions that are run by pytest before (and sometimes after) the
 actual test functions.

-You can use fixtures to get a data set for the tests to work on, or use
-them to get a system into a known state before running a test. They are
-also used to get data ready for multiple tests.
+You can use fixtures to get a data set for the tests to work on, or use them to
+get a system into a known state before running a test. They are also used to get
+data ready for multiple tests.

 Here's a simple fixture that returns a number:

@@ -102,15 +102,15 @@ def test_some_data(some_data):
     assert some_data == 42
 ```

-The `@pytest.fixture()` decorator is used to tell pytest that a function is
-a fixture.When you include the fixture name in the parameter list of a test
+The `@pytest.fixture()` decorator is used to tell pytest that a function is a
+fixture. When you include the fixture name in the parameter list of a test
 function,pytest knows to run it before running the test. Fixtures can do work,
 and can also return data to the test function.
-The test test_some_data() has the name of the fixture, some_data, as -a parameter.pytest will see this and look for a fixture with this name. Naming -is significant in pytest. pytest will look in the module of the test for -a fixture of that name. +The test test_some_data() has the name of the fixture, some_data, as a +parameter.pytest will see this and look for a fixture with this name. Naming is +significant in pytest. pytest will look in the module of the test for a fixture +of that name. If the function is defined in the same file as where it's being used pylint will raise an `W0621: Redefining name %r from outer scope (line %s)` error. To @@ -130,39 +130,46 @@ directory and subdirectories. Although `conftest.py` is a Python module, it should not be imported by test files. The file gets read by pytest, and is considered a local *plugin*. -Another option is to save the fixtures in a file by [creating a local pytest -plugin](https://gist.github.com/peterhurford/09f7dcda0ab04b95c026c60fa49c2a68). +Another option is to save the fixtures in a file by +[creating a local pytest plugin](https://gist.github.com/peterhurford/09f7dcda0ab04b95c026c60fa49c2a68). -!!! note "File: tests/unit/conftest.py" +File: `tests/unit/conftest.py` - ```python - pytest_plugins = [ - "tests.unit.fixtures.some_stuff", - ] - ``` +```python +pytest_plugins = [ + "tests.unit.fixtures.some_stuff", +] +``` + +File: `tests/unit/fixtures/some_stuff.py`: -!!! note "File: tests/unit/fixtures/some_stuff.py" +```python +import pytest - ```python - import pytest - @pytest.fixture - def foo(): - return 'foobar' - ``` +@pytest.fixture +def foo(): + return "foobar" +``` ## Specifying fixture scope -Fixtures include an optional parameter called scope, which controls how often -a fixture gets set up and torn down. The scope parameter to `@pytest.fixture()` +Fixtures include an optional parameter called scope, which controls how often a +fixture gets set up and torn down. The scope parameter to `@pytest.fixture()` can have the values of function, class, module, or session. Here’s a rundown of each scope value: -* `scope='function'`: Run once per test function. The setup portion is run before each test using the fixture. The teardown portion is run after each test using the fixture. This is the default scope used when no scope parameter is specified. -* `scope='class'`: Run once per test class, regardless of how many test methods are in the class. -* `scope='module'`: Run once per module, regardless of how many test functions or methods or other fixtures in the module use it. -* `scope='session'` Run once per session. All test methods and functions using a fixture of session scope share one setup and teardown call. +- `scope='function'`: Run once per test function. The setup portion is run + before each test using the fixture. The teardown portion is run after each + test using the fixture. This is the default scope used when no scope parameter + is specified. +- `scope='class'`: Run once per test class, regardless of how many test methods + are in the class. +- `scope='module'`: Run once per module, regardless of how many test functions + or methods or other fixtures in the module use it. +- `scope='session'` Run once per session. All test methods and functions using a + fixture of session scope share one setup and teardown call. 
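+
+For example, a minimal sketch of a session-scoped fixture (the `db_connection`
+dict below is just a stand-in for an expensive resource such as a real database
+connection):
+
+```python
+import pytest
+
+
+@pytest.fixture(scope="session")
+def db_connection():
+    # Setup runs once for the whole test session.
+    connection = {"connected": True}
+    yield connection
+    # Teardown runs once, after the last test that used the fixture.
+    connection["connected"] = False
+
+
+def test_first_query(db_connection):
+    assert db_connection["connected"]
+
+
+def test_second_query(db_connection):
+    # Reuses the same object that was created for the previous test.
+    assert db_connection["connected"]
+```
+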
## [Using fixtures at class level](https://docs.pytest.org/en/7.1.x/how-to/fixtures.html#use-fixtures-in-classes-and-modules-with-usefixtures) @@ -175,6 +182,7 @@ working directory but otherwise do not care for the concrete directory. class TestDirectoryInit: ... ``` + Due to the `usefixtures` marker, the `cleandir` fixture will be required for the execution of each test method, just as if you specified a `cleandir` function argument to each of them. @@ -187,33 +195,56 @@ You can specify multiple fixtures like this: ## Useful Fixtures +### [The tmp_path fixture](https://docs.pytest.org/en/6.2.x/tmpdir.html#the-tmp-path-fixture) + +You can use the `tmp_path` fixture which will provide a temporary directory +unique to the test invocation, created in the base temporary directory. + +`tmp_path` is a `pathlib.Path` object. Here is an example test usage: + +```python +def test_create_file(tmp_path): + d = tmp_path / "sub" + d.mkdir() + p = d / "hello.txt" + p.write_text(CONTENT) + assert p.read_text() == CONTENT + assert len(list(tmp_path.iterdir())) == 1 + assert 0 +``` + ### [The tmpdir fixture](https://docs.pytest.org/en/stable/tmpdir.html) +Warning: Don't use `tmpdir` use `tmp_path` instead because `tmpdir` uses `py` +which is unmaintained and has unpatched vulnerabilities. + You can use the `tmpdir` fixture which will provide a temporary directory unique to the test invocation, created in the base temporary directory. -`tmpdir` is a `py.path.local` object which offers `os.path` methods and more. Here is an example test usage: +`tmpdir` is a `py.path.local` object which offers `os.path` methods and more. +Here is an example test usage: + +File: `test_tmpdir.py`: -!!! note "File: test_tmpdir.py" +```python +from py._path.local import LocalPath - ```python - from py._path.local import LocalPath - def test_create_file(tmpdir: LocalPath): - p = tmpdir.mkdir("sub").join("hello.txt") - p.write("content") - assert p.read() == "content" - assert len(tmpdir.listdir()) == 1 - assert 0 - ``` +def test_create_file(tmpdir: LocalPath): + p = tmpdir.mkdir("sub").join("hello.txt") + p.write("content") + assert p.read() == "content" + assert len(tmpdir.listdir()) == 1 + assert 0 +``` The `tmpdir` fixture has a scope of `function` so you can't make a session directory. Instead use the `tmpdir_factory` fixture. - ```python from _pytest.tmpdir import TempPathFactory + @pytest.fixture(scope="session") def image_file(tmpdir_factory: TempPathFactory): img = compute_expensive_image() @@ -241,23 +272,25 @@ captured stdout and stderr. You can change the default logging level in the pytest configuration: -!!! note "File: pytest.ini" - ```ini - [pytest] +File: `pytest.ini`: + +```ini +[pytest] - log_level = debug - ``` +log_level = debug +``` Although it may not be a good idea in most cases. It's better to change the log level in the tests that need a lower level. -All the logs sent to the logger during the test run are available on the -fixture in the form of both the `logging.LogRecord` instances and the final log -text. This is useful for when you want to assert on the contents of a message: +All the logs sent to the logger during the test run are available on the fixture +in the form of both the `logging.LogRecord` instances and the final log text. 
+This is useful for when you want to assert on the contents of a message: ```python from _pytest.logging import LogCaptureFixture + def test_baz(caplog: LogCaptureFixture): func_under_test() for record in caplog.records: @@ -273,7 +306,7 @@ severity and message: def test_foo(caplog: LogCaptureFixture): logging.getLogger().info("boo %s", "arg") - assert ("root", logging.INFO, "boo arg") in caplog.record_tuples + assert ("root", logging.INFO, "boo arg") in caplog.record_tuples ``` You can call `caplog.clear()` to reset the captured log records in a test. @@ -305,7 +338,7 @@ Suppose you have a function to print a greeting to stdout: ```python def greeting(name): - print(f'Hi, {name}') + print(f"Hi, {name}") ``` You can test the output by using `capsys`. @@ -313,11 +346,12 @@ You can test the output by using `capsys`. ```python from _pytest.capture import CaptureFixture + def test_greeting(capsys: CaptureFixture[Any]): - greeting('Earthling') + greeting("Earthling") out, err = capsys.readouterr() - assert out == 'Hi, Earthling\n' - assert err == '' + assert out == "Hi, Earthling\n" + assert err == "" ``` The return value is whatever has been captured since the beginning of the @@ -338,8 +372,7 @@ pip install pytest-freezegun ##### Global usage -[Most of the -tests](https://medium.com/@boxed/flaky-tests-part-3-freeze-the-world-e4929a0da00e) +[Most of the tests](https://medium.com/@boxed/flaky-tests-part-3-freeze-the-world-e4929a0da00e) work with frozen time, so it's better to freeze it by default and unfreeze it on the ones that actually need time to move. @@ -349,12 +382,14 @@ To do that set in your `tests/conftest.py` a globally used fixture: if TYPE_CHECKING: from freezegun.api import FrozenDateTimeFactory + @pytest.fixture(autouse=True) def frozen_time() -> Generator[FrozenDateTimeFactory, None, None]: """Freeze all tests time""" with freezegun.freeze_time() as freeze: yield freeze ``` + Freeze time by using the freezer fixture: ##### Manual use @@ -363,6 +398,7 @@ Freeze time by using the freezer fixture: if TYPE_CHECKING: from freezegun.api import FrozenDateTimeFactory + def test_frozen_date(freezer: FrozenDateTimeFactory): now = datetime.now() time.sleep(1) @@ -375,7 +411,7 @@ This can then be used to move time: ```python def test_moving_date(freezer): now = datetime.now() - freezer.move_to('2017-05-20') + freezer.move_to("2017-05-20") later = datetime.now() assert now != later ``` @@ -383,23 +419,25 @@ def test_moving_date(freezer): You can also pass arguments to freezegun by using the `freeze_time` mark: ```python -@pytest.mark.freeze_time('2017-05-21') +@pytest.mark.freeze_time("2017-05-21") def test_current_date(): assert date.today() == date(2017, 5, 21) ``` -The `freezer` fixture and `freeze_time` mark can be used together, and they work with other fixtures: +The `freezer` fixture and `freeze_time` mark can be used together, and they work +with other fixtures: ```python @pytest.fixture def current_date(): return date.today() + @pytest.mark.freeze_time() def test_changing_date(current_date, freezer): - freezer.move_to('2017-05-20') + freezer.move_to("2017-05-20") assert current_date == date(2017, 5, 20) - freezer.move_to('2017-05-21') + freezer.move_to("2017-05-21") assert current_date == date(2017, 5, 21) ``` @@ -407,12 +445,11 @@ They can also be used in class-based tests: ```python class TestDate: - @pytest.mark.freeze_time def test_changing_date(self, current_date, freezer): - freezer.move_to('2017-05-20') + freezer.move_to("2017-05-20") assert current_date == date(2017, 5, 20) 
- freezer.move_to('2017-05-21') + freezer.move_to("2017-05-21") assert current_date == date(2017, 5, 21) ``` @@ -421,11 +458,11 @@ class TestDate: Sometimes you need to tweak your fixtures so they can be used in different tests. As usual, there are different solutions to the same problem. -!!! note "TL;DR: For simple cases [parametrize your -fixtures](#parametrize-your-fixtures) or use [parametrization to override the -default valued -fixture](#use-pytest-parametrization-to-override-the-default-valued-fixtures). -As your test suite get's more complex migrate to [pytest-case](pytest_cases.md)." +Note: "TL;DR: For simple cases +[parametrize your fixtures](#parametrize-your-fixtures) or use +[parametrization to override the default valued fixture](#use-pytest-parametrization-to-override-the-default-valued-fixtures). +As your test suite get's more complex migrate to +[pytest-case](pytest_cases.md)." Let's say you're running along merrily with some fixtures that create database objects for you: @@ -463,8 +500,9 @@ your default "supplier" fixture: def test_US_supplier_has_total_price_equal_net_price(product): assert product.total_price == product.net_price + def test_EU_supplier_has_total_price_including_VAT(supplier, product): - supplier.country = "FR" # oh, this doesn't work + supplier.country = "FR" # oh, this doesn't work assert product.total_price == product.net_price * 1.2 ``` @@ -482,6 +520,7 @@ def _default_supplier(): name=random_name(), ) + @pytest.fixture def us_supplier(db): s = _default_supplier() @@ -490,6 +529,7 @@ def us_supplier(db): yield s db.remove(s) + @pytest.fixture def eu_supplier(db): s = _default_supplier() @@ -500,9 +540,9 @@ def eu_supplier(db): ``` That's just one way you could do it, maybe you can figure out ways to reduce the -duplication of the `db.add()` stuff as well, but you are going to have -a different, named fixture for each customization of Supplier, and eventually -you may decide that doesn't scale. +duplication of the `db.add()` stuff as well, but you are going to have a +different, named fixture for each customization of Supplier, and eventually you +may decide that doesn't scale. ### Use factory fixtures @@ -532,7 +572,9 @@ way, and make all of your fixture hierarchy into factory functions: ```python def test_EU_supplier_has_total_price_including_VAT(make_supplier, product): supplier = make_supplier(country="FR") - product.supplier = supplier # OH, now this doesn't work, because it's too late again + product.supplier = ( + supplier # OH, now this doesn't work, because it's too late again + ) assert product.total_price == product.net_price * 1.2 ``` @@ -554,6 +596,7 @@ def make_product(db): yield _make_product db.remove(p) + def test_EU_supplier_has_total_price_including_VAT(make_supplier, make_product): supplier = make_supplier(country="FR") product = make_product(supplier=supplier) @@ -567,17 +610,13 @@ what-depends-on-what into your tests as well as your fixtures. Ugly! ### Parametrize your fixtures -You can also [parametrize your -fixtures](pytest_parametrized_testing.md#parametrize-the-fixtures). +You can also +[parametrize your fixtures](pytest_parametrized_testing.md#parametrize-the-fixtures). 
```python -@pytest.fixture(params=['US', 'FR']) +@pytest.fixture(params=["US", "FR"]) def supplier(db, request): - s = Supplier( - ref=random_ref(), - name=random_name(), - country=request.param - ) + s = Supplier(ref=random_ref(), name=random_name(), country=request.param) db.add(s) yield s db.remove(s) @@ -587,15 +626,15 @@ Now any test that depends on supplier, directly or indirectly, will be run twice, once with `supplier.country = US` and once with `FR`. That's really cool for checking that a given piece of logic works in a variety -of different cases, but it's not really ideal in our case. We have to build -a bunch of if logic into our tests: +of different cases, but it's not really ideal in our case. We have to build a +bunch of if logic into our tests: ```python def test_US_supplier_has_no_VAT_but_EU_supplier_has_total_price_including_VAT(product): # this test is magically run twice, but: - if product.supplier.country == 'US': + if product.supplier.country == "US": assert product.total_price == product.net_price - if product.supplier.country == 'FR': + if product.supplier.country == "FR": assert product.total_price == product.net_price * 1.2 ``` @@ -630,11 +669,12 @@ the default value of country, even though the country fixture isn't explicitly named in that test: ```python -@pytest.mark.parametrize('country', ["US"]) +@pytest.mark.parametrize("country", ["US"]) def test_US_supplier_has_total_price_equal_net_price(product): assert product.total_price == product.net_price -@pytest.mark.parametrize('country', ["EU"]) + +@pytest.mark.parametrize("country", ["EU"]) def test_EU_supplier_has_total_price_including_VAT(product): assert product.total_price == product.net_price * 1.2 ``` @@ -645,8 +685,8 @@ spelunking in conftest.py. ### Use pytest-case -[pytest-case](pytest_cases.md) gives a lot of power when it comes to tweaking the -fixtures and parameterizations. +[pytest-case](pytest_cases.md) gives a lot of power when it comes to tweaking +the fixtures and parameterizations. Check that file for further information. @@ -689,11 +729,10 @@ def test_one_user(user): Pytest marks can be used to group tests. It can be useful to: -`slow` -: Mark the tests that are slow. +`slow` : Mark the tests that are slow. -`secondary` -: Mart the tests that use functionality that is being tested in the same file. +`secondary` : Mart the tests that use functionality that is being tested in the +same file. To mark a test, use the `@pytest.mark` decorator. For example: @@ -724,10 +763,9 @@ with pytest.raises(SystemExit): ## Testing exceptions with pytest ```python - def test_value_error_is_raised(): with pytest.raises(ValueError, match="invalid literal for int() with base 10: 'a'"): - int('a') + int("a") ``` ## [Excluding code from coverage](https://coverage.readthedocs.io/en/coverage-4.3.3/excluding.html) @@ -737,22 +775,16 @@ to tell `coverage.py` to ignore it. For example, if you have some code in abstract classes that is going to be tested on the subclasses, you can ignore it with `# pragma: no cover`. 
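+
+A minimal sketch of what that looks like (the `AbstractRepository` class is
+only illustrative):
+
+```python
+from abc import ABC, abstractmethod
+
+
+class AbstractRepository(ABC):
+    @abstractmethod
+    def get(self, entity_id: int) -> dict:  # pragma: no cover
+        """Concrete subclasses implement and test this method."""
+        raise NotImplementedError
+```
+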
-If you want [other code to be -excluded](https://github.com/nedbat/coveragepy/issues/831), for example the -statements inside the `if TYPE_CHECKING:` add to your `pyproject.toml`: +If you want +[other code to be excluded](https://github.com/nedbat/coveragepy/issues/831), +for example the statements inside the `if TYPE_CHECKING:` add to your +`pyproject.toml`: ```toml [tool.coverage.report] -exclude_lines = [ - # Have to re-enable the standard pragma - 'pragma: no cover', - - # Type checking can not be tested - 'if TYPE_CHECKING:', -] +exclude_lines = [ "pragma: no cover", "if TYPE_CHECKING:",] ``` - # [Running tests in parallel](https://pypi.org/project/pytest-xdist/) `pytest-xdist` makes it possible to run the tests in parallel, useful when the @@ -800,6 +832,7 @@ import filelock import pytest from filelock import BaseFileLock + @pytest.fixture(name="lock", scope="session") def lock_( tmp_path_factory: pytest.TempPathFactory, @@ -834,6 +867,7 @@ Mark the tests you want to execute serially with a special mark, say serial: class Test: ... + @pytest.mark.serial def test_foo(): ... @@ -879,8 +913,8 @@ def test_foo(): # [Rerun tests that fail sometimes](https://pypi.org/project/pytest-rerunfailures/) -[pytest-rerunfailures](https://pypi.org/project/pytest-rerunfailures/) is -a plugin for pytest that re-runs tests to eliminate intermittent failures. Using +[pytest-rerunfailures](https://pypi.org/project/pytest-rerunfailures/) is a +plugin for pytest that re-runs tests to eliminate intermittent failures. Using this plugin is generally a bad idea, it would be best to solve the reason why your code is not reliable. It's useful when you rely on non robust third party software in a way that you can't solve, or if the error is not in your code but @@ -917,6 +951,7 @@ to run: @pytest.mark.flaky(reruns=5) def test_example(): import random + assert random.choice([True, False]) ``` @@ -933,30 +968,26 @@ with `pytest-xdist` though :(. # [Capture deprecation warnings](https://docs.pytest.org/en/latest/how-to/capture-warnings.html) -Python and its ecosystem does not have an assumption of strict SemVer, and has -a tradition of providing deprecation warnings. If you have good CI, you should -be able to catch warnings even before your users see them. Try the following -pytest configuration: +Python and its ecosystem does not have an assumption of strict SemVer, and has a +tradition of providing deprecation warnings. If you have good CI, you should be +able to catch warnings even before your users see them. Try the following pytest +configuration: ```toml [tool.pytest.ini_options] -filterwarnings = ["error"] +filterwarnings = [ "error",] ``` -This will turn warnings into errors and allow your CI to break before users break. +This will turn warnings into errors and allow your CI to break before users +break. You can ignore specific warnings as well. For example, the configuration below -will ignore all user warnings and specific deprecation warnings matching -a regex, but will transform all other warnings into errors. +will ignore all user warnings and specific deprecation warnings matching a +regex, but will transform all other warnings into errors. 
```toml [tool.pytest.ini_options] -filterwarnings = [ - "error", - "ignore::UserWarning", - # note the use of single quote below to denote "raw" strings in TOML - 'ignore:function ham\(\) is deprecated:DeprecationWarning', -] +filterwarnings = [ "error", "ignore::UserWarning", "ignore:function ham\\(\\) is deprecated:DeprecationWarning",] ``` When a warning matches more than one option in the list, the action for the last @@ -965,17 +996,11 @@ matching option is performed. If you want to ignore the warning of a specific package use: ```toml -filterwarnings = [ - "error", - # Until https://github.com/ktosiek/pytest-freezegun/issues/35 is merged - "ignore::DeprecationWarning:pytest_freezegun.*" -] +filterwarnings = [ "error", "ignore::DeprecationWarning:pytest_freezegun.*",] ``` -!!! note - It's better to suppress a warning instead of disabling it for the - whole code, check how [here](use_warnings.md#suppressing-a-warning). - +Note: It's better to suppress a warning instead of disabling it for the whole +code, check how [here](use_warnings.md#suppressing-a-warning). ## [Ensuring code triggers a deprecation warning](https://docs.pytest.org/en/latest/how-to/capture-warnings.html#ensuring-code-triggers-a-deprecation-warning) @@ -985,6 +1010,7 @@ call triggers a `DeprecationWarning` or `PendingDeprecationWarning`: ```python import pytest + def test_myfunction_deprecated(): with pytest.deprecated_call(): myfunction(17) @@ -999,6 +1025,7 @@ works in a similar manner to raises: import warnings import pytest + def test_warning(): with pytest.warns(UserWarning): warnings.warn("my warning", UserWarning) @@ -1036,6 +1063,7 @@ The `recwarn` fixture will record warnings for the whole function: ```python import warnings + def test_hello(recwarn): warnings.warn("hello", UserWarning) assert len(recwarn) == 1 @@ -1057,12 +1085,12 @@ Add to your `pyproject.toml`: ```toml [tool.pytest.ini_options] - log_cli = true - log_cli_level = 10 +log_cli = true +log_cli_level = 10 ``` -Or run it in the command itself `pytest -o log_cli=true --log-cli-level=10 -func.py`. +Or run it in the command itself +`pytest -o log_cli=true --log-cli-level=10 func.py`. Remember you can [change the log level](#change-the-log-level) of the different components in case it's too verbose. @@ -1073,8 +1101,8 @@ Integrating pytest into your Vim workflow enhances your productivity while writing code, thus making it easier to code using TDD. I use [Janko-m's Vim-test plugin](https://github.com/janko-m/vim-test) (which -can be installed through [Vundle](https://github.com/VundleVim/Vundle.vim)) with the -following configuration. +can be installed through [Vundle](https://github.com/VundleVim/Vundle.vim)) with +the following configuration. ```vim nmap t :TestNearest --pdb @@ -1101,10 +1129,11 @@ file. As you can see only the `t` has the `--pdb` flag, so the rest of them will run en parallel and any pdb trace will fail. - # Reference -* Book [Python Testing with pytest by Brian Okken](https://www.oreilly.com/library/view/python-testing-with/9781680502848/). -* [Docs](https://docs.pytest.org/en/latest/) +- Book + [Python Testing with pytest by Brian Okken](https://www.oreilly.com/library/view/python-testing-with/9781680502848/). 
+ +- [Docs](https://docs.pytest.org/en/latest/) -* [Vim-test plugin](https://github.com/janko-m/vim-test) +- [Vim-test plugin](https://github.com/janko-m/vim-test) diff --git a/docs/coding/python/python_snippets.md b/docs/coding/python/python_snippets.md index de18a8d0c70..0fd83b18d28 100644 --- a/docs/coding/python/python_snippets.md +++ b/docs/coding/python/python_snippets.md @@ -4,13 +4,23 @@ date: 20200717 author: Lyz --- +# [Pad integer with zeros](https://stackoverflow.com/questions/39402795/how-to-pad-a-string-with-leading-zeros-in-python-3) + +```python +>>> length = 1 +>>> print(f'length = {length:03}') +length = 001 +``` + # [Print datetime with a defined format](https://stackoverflow.com/questions/311627/how-to-print-a-date-in-a-regular-format) + ```python now = datetime.now() -today.strftime('We are the %d, %b %Y') +today.strftime("We are the %d, %b %Y") ``` -Where the datetime format is a string built from [these directives](#parse-a-datetime-from-a-string). +Where the datetime format is a string built from +[these directives](#parse-a-datetime-from-a-string). # [Print string with asciiart](https://www.askpython.com/python-modules/ascii-art) @@ -20,7 +30,8 @@ pip install pyfiglet ```python from pyfiglet import figlet_format -print(figlet_format('09 : 30')) + +print(figlet_format("09 : 30")) ``` If you want to change the default width of 80 caracteres use: @@ -35,41 +46,43 @@ print(f.renderText("aaaaaaaaaaaaaaaaa")) # Print specific time format ```python -datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S') -``` - -Code Meaning Example -%a Weekday as locale’s abbreviated name. Mon -%A Weekday as locale’s full name. Monday -%w Weekday as a decimal number, where 0 is Sunday and 6 is Saturday. 1 -%d Day of the month as a zero-padded decimal number. 30 -%-d Day of the month as a decimal number. (Platform specific) 30 -%b Month as locale’s abbreviated name. Sep -%B Month as locale’s full name. September -%m Month as a zero-padded decimal number. 09 -%-m Month as a decimal number. (Platform specific) 9 -%y Year without century as a zero-padded decimal number. 13 -%Y Year with century as a decimal number. 2013 -%H Hour (24-hour clock) as a zero-padded decimal number. 07 -%-H Hour (24-hour clock) as a decimal number. (Platform specific) 7 -%I Hour (12-hour clock) as a zero-padded decimal number. 07 -%-I Hour (12-hour clock) as a decimal number. (Platform specific) 7 -%p Locale’s equivalent of either AM or PM. AM -%M Minute as a zero-padded decimal number. 06 -%-M Minute as a decimal number. (Platform specific) 6 -%S Second as a zero-padded decimal number. 05 -%-S Second as a decimal number. (Platform specific) 5 -%f Microsecond as a decimal number, zero-padded on the left. 000000 -%z UTC offset in the form +HHMM or -HHMM (empty string if the the object is naive). -%Z Time zone name (empty string if the object is naive). -%j Day of the year as a zero-padded decimal number. 273 -%-j Day of the year as a decimal number. (Platform specific) 273 -%U Week number of the year (Sunday as the first day of the week) as a zero padded decimal number. All days in a new year preceding the first Sunday are considered to be in week 0. 39 -%W Week number of the year (Monday as the first day of the week) as a decimal number. All days in a new year preceding the first Monday are considered to be in week 0. -%c Locale’s appropriate date and time representation. Mon Sep 30 07:06:05 2013 -%x Locale’s appropriate date representation. 09/30/13 -%X Locale’s appropriate time representation. 
07:06:05 -%% A literal '%' character. % +datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S") +``` + +| Code | Meaning Example | +| ---- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| %a | Weekday as locale’s abbreviated name. Mon | +| %A | Weekday as locale’s full name. Monday | +| %w | Weekday as a decimal number, where 0 is Sunday and 6 is Saturday. 1 | +| %d | Day of the month as a zero-padded decimal number. 30 | +| %-d | Day of the month as a decimal number. (Platform specific) 30 | +| %b | Month as locale’s abbreviated name. Sep | +| %B | Month as locale’s full name. September | +| %m | Month as a zero-padded decimal number. 09 | +| %-m | Month as a decimal number. (Platform specific) 9 | +| %y | Year without century as a zero-padded decimal number. 13 | +| %Y | Year with century as a decimal number. 2013 | +| %H | Hour (24-hour clock) as a zero-padded decimal number. 07 | +| %-H | Hour (24-hour clock) as a decimal number. (Platform specific) 7 | +| %I | Hour (12-hour clock) as a zero-padded decimal number. 07 | +| %-I | Hour (12-hour clock) as a decimal number. (Platform specific) 7 | +| %p | Locale’s equivalent of either AM or PM. AM | +| %M | Minute as a zero-padded decimal number. 06 | +| %-M | Minute as a decimal number. (Platform specific) 6 | +| %S | Second as a zero-padded decimal number. 05 | +| %-S | Second as a decimal number. (Platform specific) 5 | +| %f | Microsecond as a decimal number, zero-padded on the left. 000000 | +| %z | UTC offset in the form +HHMM or -HHMM (empty string if the the object is naive). | +| %Z | Time zone name (empty string if the object is naive). | +| %j | Day of the year as a zero-padded decimal number. 273 | +| %-j | Day of the year as a decimal number. (Platform specific) 273 | +| %U | Week number of the year (Sunday as the first day of the week) as a zero padded decimal number. All days in a new year preceding the first Sunday are considered to be in week 0. 39 | +| %W | Week number of the year (Monday as the first day of the week) as a decimal number. All days in a new year preceding the first Monday are considered to be in week 0. | +| %c | Locale’s appropriate date and time representation. Mon Sep 30 07:06:05 2013 | +| %x | Locale’s appropriate date representation. 09/30/13 | +| %X | Locale’s appropriate time representation. 07:06:05 | +| %% | A literal '%' character. % | + # [Get an instance of an Enum by value](https://stackoverflow.com/questions/29503339/how-to-get-all-values-from-python-enum-class) If you want to initialize a pydantic model with an `Enum` but all you have is @@ -106,10 +119,7 @@ class Environment(str, Enum): Now you can do: ```python -ServiceStatus( - name='test', - environment=Environment.get_by_value('production') -) +ServiceStatus(name="test", environment=Environment.get_by_value("production")) ``` # [Fix R1728: Consider using a generator](https://pylint.pycqa.org/en/latest/user_guide/messages/refactor/consider-using-generator.html) @@ -146,6 +156,8 @@ not. This is a problem because we silently ignore errors. ```python import subprocess + + def example(): proc = subprocess.run("ls") return proc.stdout @@ -157,18 +169,18 @@ when the return code of the command is non-zero. 
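+
+For instance, a sketch of the safer variant with `check=True` (the `ls` command
+is only an example):
+
+```python
+import subprocess
+
+
+def example() -> str:
+    # check=True raises CalledProcessError when the command exits with a
+    # non-zero code, instead of silently returning the failed result.
+    proc = subprocess.run(["ls"], check=True, capture_output=True, text=True)
+    return proc.stdout
+```
+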
# [Convert bytes to string](https://pythonexamples.org/python-bytes-to-string/) ```python -byte_var.decode('utf-8') +byte_var.decode("utf-8") ``` # [Use pipes with subprocess](https://stackoverflow.com/questions/13332268/how-to-use-subprocess-command-with-pipes) -To use pipes with subprocess you need to use the flag `shell=True` which is [a -bad idea](https://github.com/duo-labs/dlint/blob/master/docs/linters/DUO116.md). +To use pipes with subprocess you need to use the flag `shell=True` which is +[a bad idea](https://github.com/duo-labs/dlint/blob/master/docs/linters/DUO116.md). Instead you should use two processes and link them together in python: ```python -ps = subprocess.Popen(('ps', '-A'), stdout=subprocess.PIPE) -output = subprocess.check_output(('grep', 'process_name'), stdin=ps.stdout) +ps = subprocess.Popen(("ps", "-A"), stdout=subprocess.PIPE) +output = subprocess.check_output(("grep", "process_name"), stdin=ps.stdout) ps.wait() ``` @@ -176,14 +188,15 @@ ps.wait() ```python import subprocess -p = subprocess.run(['myapp'], input='data_to_write', text=True) + +p = subprocess.run(["myapp"], input="data_to_write", text=True) ``` # [Copy and paste from clipboard](https://stackoverflow.com/questions/11063458/python-script-to-copy-text-to-clipboard) -You can use [many -libraries](https://www.delftstack.com/howto/python/python-copy-to-clipboard/) to -do it, but if you don't want to add any other dependencies you can use +You can use +[many libraries](https://www.delftstack.com/howto/python/python-copy-to-clipboard/) +to do it, but if you don't want to add any other dependencies you can use `subprocess run`. To copy from the `selection` clipboard, assuming you've got `xclip` installed, @@ -191,8 +204,8 @@ you could do: ```python subprocess.run( - ['xclip', '-selection', 'clipboard', '-i'], - input='text to be copied', + ["xclip", "-selection", "clipboard", "-i"], + input="text to be copied", text=True, check=True, ) @@ -201,18 +214,16 @@ subprocess.run( To paste it: ```python -subprocess.check_output( - ['xclip', '-o', '-selection', 'clipboard'] -).decode('utf-8') +subprocess.check_output(["xclip", "-o", "-selection", "clipboard"]).decode("utf-8") ``` # [Create random number](https://www.pythoncentral.io/how-to-generate-a-random-number-in-python/) ```python import random -a=random.randint(1,10) -``` +a = random.randint(1, 10) +``` # [Check if local port is available or in use](https://stackoverflow.com/questions/43270868/verify-if-a-local-port-is-available-in-python) @@ -272,7 +283,7 @@ class Container: Usage: ```python -params = {'user_id': 1, 'body': 'foo', 'bar': 'baz', 'amount': 10} +params = {"user_id": 1, "body": "foo", "bar": "baz", "amount": 10} Container(**params) # still doesn't work, raises a TypeError c = Container.from_kwargs(**params) print(c.bar) # prints: 'baz' @@ -286,11 +297,10 @@ txt = "I like bananas" x = txt.replace("bananas", "apples") ``` - # [Parse an RFC2822 date](https://stackoverflow.com/questions/1568856/how-do-i-convert-rfc822-to-a-python-datetime-object) -Interesting to test the accepted format of [RSS -dates](https://www.rssboard.org/rss-validator/docs/error/InvalidRFC2822Date.html). +Interesting to test the accepted format of +[RSS dates](https://www.rssboard.org/rss-validator/docs/error/InvalidRFC2822Date.html). 
```python >>> from email.utils import parsedate_to_datetime @@ -301,8 +311,8 @@ datetime.datetime(1997, 3, 9, 13, 45, tzinfo=datetime.timezone(datetime.timedelt # [Convert a datetime to RFC2822](https://stackoverflow.com/questions/3453177/convert-python-datetime-to-rfc-2822) -Interesting as it's the accepted format of [RSS -dates](https://www.rssboard.org/rss-validator/docs/error/InvalidRFC2822Date.html). +Interesting as it's the accepted format of +[RSS dates](https://www.rssboard.org/rss-validator/docs/error/InvalidRFC2822Date.html). ```python >>> import datetime @@ -312,13 +322,13 @@ dates](https://www.rssboard.org/rss-validator/docs/error/InvalidRFC2822Date.html 'Tue, 10 Feb 2020 10:06:53 -0000' ``` - # [Encode url](https://www.urlencoder.io/python/) ```python import urllib.parse from pydantic import AnyHttpUrl + def _normalize_url(url: str) -> AnyHttpUrl: """Encode url to make it compatible with AnyHttpUrl.""" return typing.cast( @@ -355,7 +365,7 @@ class G: @classmethod @property def __doc__(cls): - return f'A doc for {cls.__name__!r}' + return f"A doc for {cls.__name__!r}" ``` If you're not, you can define the decorator `classproperty`: @@ -387,10 +397,10 @@ class classproperty: # noqa: N801, C0103 return self.function(owner_self) ``` -But you'll run into the `W0143: Comparing against a callable, did you omit the -parenthesis? (comparison-with-callable)` mypy error when using it to compare the -result of the property with anything, as it doesn't detect it's a property -instead of a method. +But you'll run into the +`W0143: Comparing against a callable, did you omit the parenthesis? (comparison-with-callable)` +mypy error when using it to compare the result of the property with anything, as +it doesn't detect it's a property instead of a method. # [How to close a subprocess process](https://stackoverflow.com/questions/62172227/how-to-close-subprocess-in-python) @@ -419,10 +429,11 @@ If you want to count the number of occurrences of each duplicate, you can use: ```python from collections import Counter + numbers = [1, 2, 3, 2, 5, 3, 3, 5, 6, 3, 4, 5, 7] counts = dict(Counter(numbers)) -duplicates = {key:value for key, value in counts.items() if value > 1} +duplicates = {key: value for key, value in counts.items() if value > 1} # Returns: {2: 2, 3: 4, 5: 3} ``` @@ -440,8 +451,9 @@ unique = list(set(numbers)) ```python import gzip import shutil -with gzip.open('file.txt.gz', 'rb') as f_in: - with open('file.txt', 'wb') as f_out: + +with gzip.open("file.txt.gz", "rb") as f_in: + with open("file.txt", "wb") as f_out: shutil.copyfileobj(f_in, f_out) ``` @@ -459,6 +471,7 @@ def compress(tar_file, members): tar.close() + def decompress(tar_file, path, members=None): """ Extracts `tar_file` and puts the `members` to `path`. @@ -486,7 +499,7 @@ bs = BeautifulSoup(requests.get(url), "lxml") import traceback # `e` is an exception object that you get from somewhere -traceback_str = ''.join(traceback.format_tb(e.__traceback__)) +traceback_str = "".join(traceback.format_tb(e.__traceback__)) ``` # Change the logging level of a library @@ -521,8 +534,7 @@ regex = re.compile( ) ``` -# [Remove the elements of a list from -another](https://stackoverflow.com/questions/4211209/remove-all-the-elements-that-occur-in-one-list-from-another) +# [Remove the elements of a list from another](https://stackoverflow.com/questions/4211209/remove-all-the-elements-that-occur-in-one-list-from-another) ```python >>> set([1,2,6,8]) - set([2,3,5,8]) @@ -539,7 +551,7 @@ performance option. 
```python import shutil -shutil.copytree('bar', 'foo') +shutil.copytree("bar", "foo") ``` # [Copy a file](https://stackabuse.com/how-to-copy-a-file-in-python/) @@ -562,8 +574,7 @@ with redirect_stdout(f): out = f.getvalue() ``` -# [Make temporal -directory](https://stackoverflow.com/questions/3223604/how-to-create-a-temporary-directory-and-get-its-path-file-name) +# [Make temporal directory](https://stackoverflow.com/questions/3223604/how-to-create-a-temporary-directory-and-get-its-path-file-name) ```python import tempfile @@ -572,6 +583,7 @@ dirpath = tempfile.mkdtemp() ``` # [Change the working directory of a test](https://stackoverflow.com/questions/62044541/change-pytest-working-directory-to-test-case-directory) + The following function-level fixture will change to the test case directory, run the test (`yield`), then change back to the calling directory to avoid side-effects. @@ -584,12 +596,12 @@ def change_test_dir_(request: SubRequest) -> Any: os.chdir(request.config.invocation_dir) ``` -* `request` is a built-in pytest fixture -* `fspath` is the `LocalPath` to the test module being executed -* `dirname` is the directory of the test module -* `request.config.invocationdir` is the folder from which pytest was executed -* `request.config.rootdir` is the pytest root, doesn't change based on where you - run pytest. Not used here, but could be useful. +- `request` is a built-in pytest fixture +- `fspath` is the `LocalPath` to the test module being executed +- `dirname` is the directory of the test module +- `request.config.invocationdir` is the folder from which pytest was executed +- `request.config.rootdir` is the pytest root, doesn't change based on where you + run pytest. Not used here, but could be useful. Any processes that are kicked off by the test will use the test case folder as their working directory and copy their logs, outputs, etc. there, regardless of @@ -597,20 +609,20 @@ where the test suite was executed. 
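+
+A sketch of a test that relies on that fixture (the `config.yaml` asset is
+hypothetical, and the fixture is assumed to be available as `change_test_dir_`
+from `conftest.py`):
+
+```python
+from pathlib import Path
+
+
+def test_resolves_paths_relative_to_the_test_module(change_test_dir_: None) -> None:
+    # The fixture already changed the working directory to this test's folder,
+    # so a relative path now resolves next to the test module itself.
+    assert Path("config.yaml").resolve().parent == Path(__file__).resolve().parent
+```
+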
# [Remove a substring from the end of a string](https://stackoverflow.com/questions/1038824/how-do-i-remove-a-substring-from-the-end-of-a-string) -On Python 3.9 and newer you can use the `removeprefix` and `removesuffix` methods to -remove an entire substring from either side of the string: +On Python 3.9 and newer you can use the `removeprefix` and `removesuffix` +methods to remove an entire substring from either side of the string: ```python -url = 'abcdc.com' -url.removesuffix('.com') # Returns 'abcdc' -url.removeprefix('abcdc.') # Returns 'com' +url = "abcdc.com" +url.removesuffix(".com") # Returns 'abcdc' +url.removeprefix("abcdc.") # Returns 'com' ``` On Python 3.8 and older you can use `endswith` and slicing: ```python -url = 'abcdc.com' -if url.endswith('.com'): +url = "abcdc.com" +if url.endswith(".com"): url = url[:-4] ``` @@ -618,8 +630,9 @@ Or a regular expression: ```python import re -url = 'abcdc.com' -url = re.sub('\.com$', '', url) + +url = "abcdc.com" +url = re.sub("\.com$", "", url) ``` # [Make a flat list of lists with a list comprehension](https://stackoverflow.com/questions/952914/how-to-make-a-flat-list-out-of-a-list-of-lists) @@ -634,15 +647,15 @@ flat_list = [item for sublist in t for item in sublist] # [Replace all characters of a string with another character](https://stackoverflow.com/questions/48995979/how-to-replace-all-characters-in-a-string-with-one-character/48996018) ```python -mystring = '_'*len(mystring) +mystring = "_" * len(mystring) ``` # [Locate element in list](https://appdividend.com/2019/11/16/how-to-find-element-in-list-in-python/) ```python -a = ['a', 'b'] +a = ["a", "b"] -index = a.index('b') +index = a.index("b") ``` # [Transpose a list of lists](https://stackoverflow.com/questions/6473679/transpose-list-of-lists) @@ -676,9 +689,9 @@ time because if the file or directory exists, that code is not run. 
# [Check if a dictionary is a subset of another](https://stackoverflow.com/questions/9323749/how-to-check-if-one-dictionary-is-a-subset-of-another-larger-dictionary) -If you have two dictionaries `big = {'a': 1, 'b': 2, 'c':3}` and `small = {'c': -3, 'a': 1}`, and want to check whether `small` is a subset of `big`, use the -next snippet: +If you have two dictionaries `big = {'a': 1, 'b': 2, 'c':3}` and +`small = {'c': 3, 'a': 1}`, and want to check whether `small` is a subset of +`big`, use the next snippet: ```python >>> small.items() <= big.items() @@ -697,6 +710,7 @@ the next code: class Shape: pass + class Rectangle(Shape): def __init__(self, length, width): self.length = length @@ -706,13 +720,15 @@ class Rectangle(Shape): def get_area(self): return self.length * self.width + class Square(Rectangle): - def __init__(self,length): - Rectangle.__init__(self,length,length) + def __init__(self, length): + Rectangle.__init__(self, length, length) ``` -And we want to check if an object `a = Square(5)` is of type `Rectangle`, we could not use -`isinstance` because it'll return `True` as it's a subclass of `Rectangle`: +And we want to check if an object `a = Square(5)` is of type `Rectangle`, we +could not use `isinstance` because it'll return `True` as it's a subclass of +`Rectangle`: ```python >>> isinstance(a, Rectangle) @@ -725,6 +741,7 @@ Instead, use a comparison with `type`: >>> type(a) == Rectangle False ``` + # [Find a static file of a python module](https://stackoverflow.com/questions/39104/finding-a-file-in-a-python-module-distribution) Useful when you want to initialize a configuration file of a cli program when @@ -745,14 +762,15 @@ Then you could import the data with: ```python import pkg_resources -file_path = pkg_resources.resource_filename("pynbox", "assets/config.yaml"), +file_path = (pkg_resources.resource_filename("pynbox", "assets/config.yaml"),) ``` # [Delete a file](https://www.w3schools.com/python/python_file_remove.asp) ```python import os -os.remove('demofile.txt') + +os.remove("demofile.txt") ``` # [Measure elapsed time between lines of code](https://stackoverflow.com/questions/7370801/how-to-measure-elapsed-time-in-python) @@ -785,6 +803,7 @@ pip install html2text ```python import html2text + html = open("foobar.html").read() print(html2text.html2text(html)) ``` @@ -793,6 +812,7 @@ print(html2text.html2text(html)) ```python from dateutil import parser + parser.parse("Aug 28 1999 12:00AM") # datetime.datetime(1999, 8, 28, 0, 0) ``` @@ -805,7 +825,7 @@ datetime.datetime.strptime("2013-W26", "%Y-W%W-%w") Where the datetime format is a string built from the next directives: | Directive | Meaning | Example | -| --- | --- | --- | +| --------- | -------------------------------------------------------------- | ------------------------ | | %a | Abbreviated weekday name. | Sun, Mon, ... | | %A | Full weekday name. | Sunday, Monday, ... | | %w | Weekday as a decimal number. 
| 0, 1, ..., 6 | @@ -841,24 +861,26 @@ Where the datetime format is a string built from the next directives: # Install a python dependency from a git repository -With [pip you -can](https://stackoverflow.com/questions/16584552/how-to-state-in-requirements-txt-a-direct-github-source): +With +[pip you can](https://stackoverflow.com/questions/16584552/how-to-state-in-requirements-txt-a-direct-github-source): ```bash pip install git+git://github.com/path/to/repository@master ``` -If you want [to hard code it in your `setup.py`](https://stackoverflow.com/questions/32688688/how-to-write-setup-py-to-include-a-git-repository-as-a-dependency/54794506#54794506), you need to: +If you want +[to hard code it in your `setup.py`](https://stackoverflow.com/questions/32688688/how-to-write-setup-py-to-include-a-git-repository-as-a-dependency/54794506#54794506), +you need to: ```python install_requires = [ - 'some-pkg @ git+ssh://git@github.com/someorgname/pkg-repo-name@v1.1#egg=some-pkg', + "some-pkg @ git+ssh://git@github.com/someorgname/pkg-repo-name@v1.1#egg=some-pkg", ] ``` -But [Pypi won't allow you to upload the -package](https://github.com/BaderLab/saber/issues/35), as it will give you -an error: +But +[Pypi won't allow you to upload the package](https://github.com/BaderLab/saber/issues/35), +as it will give you an error: ``` HTTPError: 400 Bad Request from https://test.pypi.org/legacy/ @@ -866,7 +888,8 @@ Invalid value for requires_dist. Error: Can't have direct dependency: 'deepdiff ``` It looks like this is a conscious decision on the PyPI side. Basically, they -don't want pip to reach out to URLs outside their site when installing from PyPI. +don't want pip to reach out to URLs outside their site when installing from +PyPI. An ugly patch is to install the dependencies in a `PostInstall` custom script in the `setup.py` of your program: @@ -888,14 +911,12 @@ class PostInstall(install): # type: ignore install.run(self) print(getoutput("pip install git+git://github.com/lyz-code/deepdiff@master")) -setup( - cmdclass={'install': PostInstall} -) + +setup(cmdclass={"install": PostInstall}) ``` -!!! warning "It may not work!" - Last time I used this solution, when I added the library on a `setup.py` the - direct dependencies weren't installed :S +Warning: It may not work! Last time I used this solution, when I added the +library on a `setup.py` the direct dependencies weren't installed :S # Check or test directories and files @@ -903,12 +924,13 @@ setup( def test_dir(directory): from os.path import exists from os import makedirs + if not exists(directory): makedirs(directory) def test_file(filepath, mode): - ''' Check if a file exist and is accessible. 
''' + """Check if a file exist and is accessible.""" def check_mode(os_mode, mode): if os.path.isfile(filepath) and os.access(filepath, os_mode): @@ -916,11 +938,11 @@ def test_file(filepath, mode): else: raise IOError("Can't access the file with mode " + mode) - if mode is 'r': + if mode is "r": check_mode(os.R_OK, mode) - elif mode is 'w': + elif mode is "w": check_mode(os.W_OK, mode) - elif mode is 'a': + elif mode is "a": check_mode(os.R_OK, mode) check_mode(os.W_OK, mode) ``` @@ -936,10 +958,9 @@ os.path.splitext("/path/to/some/file.txt")[0] ```python import os -directory = '/path/to/directory' +directory = "/path/to/directory" for entry in os.scandir(directory): - if (entry.path.endswith(".jpg") - or entry.path.endswith(".png")) and entry.is_file(): + if (entry.path.endswith(".jpg") or entry.path.endswith(".png")) and entry.is_file(): print(entry.path) ``` @@ -955,14 +976,16 @@ if not os.path.exists(directory): ```python from pathlib import Path -Path('path/to/file.txt').touch() +Path("path/to/file.txt").touch() ``` # [Get the first day of next month](https://stackoverflow.com/questions/4130922/how-to-increment-datetime-by-custom-months-in-python-without-using-library) ```python current = datetime.datetime(mydate.year, mydate.month, 1) -next_month = datetime.datetime(mydate.year + int(mydate.month / 12), ((mydate.month % 12) + 1), 1) +next_month = datetime.datetime( + mydate.year + int(mydate.month / 12), ((mydate.month % 12) + 1), 1 +) ``` # [Get the week number of a datetime](https://stackoverflow.com/questions/2600775/how-to-get-week-number-in-python) @@ -981,12 +1004,14 @@ year, weeknumber and weekday in respective order for the given date instance. # [Get the monday of a week number](https://stackoverflow.com/questions/17087314/get-date-from-week-number) -A week number is not enough to generate a date; you need a day of the week as well. Add a default: +A week number is not enough to generate a date; you need a day of the week as +well. Add a default: ```python import datetime + d = "2013-W26" -r = datetime.datetime.strptime(d + '-1', "%Y-W%W-%w") +r = datetime.datetime.strptime(d + "-1", "%Y-W%W-%w") ``` The `-1` and `-%w` pattern tells the parser to pick the Monday in that week. @@ -1004,7 +1029,7 @@ import calendar ```python def int_to_ordinal(number: int) -> str: - '''Convert an integer into its ordinal representation. + """Convert an integer into its ordinal representation. make_ordinal(0) => '0th' make_ordinal(3) => '3rd' @@ -1016,10 +1041,10 @@ def int_to_ordinal(number: int) -> str: Returns: ordinal representation of the number - ''' - suffix = ['th', 'st', 'nd', 'rd', 'th'][min(number % 10, 4)] + """ + suffix = ["th", "st", "nd", "rd", "th"][min(number % 10, 4)] if 11 <= (number % 100) <= 13: - suffix = 'th' + suffix = "th" return f"{number}{suffix}" ``` @@ -1072,7 +1097,6 @@ as keys. For example: [('dave', 'B', 10), ('jane', 'B', 12), ('john', 'A', 15)] ``` - The same technique works for objects with named attributes. 
For example: ```python @@ -1160,7 +1184,6 @@ To return a new list, use the `sorted()` built-in function: newlist = sorted(ut, key=lambda x: x.body.id_, reverse=True) ``` - # [Iterate over an instance object's data attributes in Python](https://www.saltycrane.com/blog/2008/09/how-iterate-over-instance-objects-data-attributes-python/) ```python @@ -1169,9 +1192,10 @@ class Search: center: str distance: str -se = Search('a', 'b') + +se = Search("a", "b") for key, value in se.__dict__.items(): - print(key, value) + print(key, value) ``` # [Generate ssh key](https://stackoverflow.com/questions/2466401/how-to-generate-ssh-key-pairs-with-python) @@ -1187,17 +1211,15 @@ from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.backends import default_backend as crypto_default_backend private_key = rsa.generate_private_key( - backend=crypto_default_backend(), - public_exponent=65537, - key_size=4096 + backend=crypto_default_backend(), public_exponent=65537, key_size=4096 ) pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.NoEncryption() + encryption_algorithm=serialization.NoEncryption(), ) -with open("/tmp/private.key", 'wb') as content_file: +with open("/tmp/private.key", "wb") as content_file: chmod("/tmp/private.key", 0600) content_file.write(pem) @@ -1206,9 +1228,9 @@ public_key = ( encoding=serialization.Encoding.OpenSSH, format=serialization.PublicFormat.OpenSSH, ) - + b' user@email.org' + + b" user@email.org" ) -with open("/tmp/public.key", 'wb') as content_file: +with open("/tmp/public.key", "wb") as content_file: content_file.write(public_key) ``` @@ -1220,33 +1242,34 @@ you need to remove the indentation ```python def test(): # end first line with \ to avoid the empty line! - s = '''\ + s = """\ hello world -''' +""" ``` Which is inconvenient as it breaks some editor source code folding and it's ugly for the eye. -The solution is to use [`textwrap.dedent()`](https://docs.python.org/3/library/textwrap.html) +The solution is to use +[`textwrap.dedent()`](https://docs.python.org/3/library/textwrap.html) ```python import textwrap + def test(): # end first line with \ to avoid the empty line! - s = '''\ + s = """\ hello world - ''' - print(repr(s)) # prints ' hello\n world\n ' + """ + print(repr(s)) # prints ' hello\n world\n ' print(repr(textwrap.dedent(s))) # prints 'hello\n world\n' - ``` -If you forget to add the trailing `\` character of `s = '''\` or use `s -= '''hello`, you're going to have a bad time with [black](black.md). +If you forget to add the trailing `\` character of `s = '''\` or use +`s = '''hello`, you're going to have a bad time with [black](black.md). # [Play a sound](https://linuxhint.com/play_sound_python/) @@ -1256,14 +1279,16 @@ pip install playsound ```python from playsound import playsound -playsound('path/to/file.wav') + +playsound("path/to/file.wav") ``` # [Deep copy a dictionary](https://stackoverflow.com/questions/5105517/deep-copy-of-a-dict-in-python) ```python import copy -d = { ... } + +d = {...} d2 = copy.deepcopy(d) ``` @@ -1291,7 +1316,7 @@ here() # [Check if an object has an attribute](https://stackoverflow.com/questions/610883/how-to-know-if-an-object-has-an-attribute-in-python) ```python -if hasattr(a, 'property'): +if hasattr(a, "property"): a.property ``` @@ -1301,9 +1326,9 @@ if hasattr(a, 'property'): a `break` statement. 
```python -for i in [1,2,3]: +for i in [1, 2, 3]: print(i) - if i==3: + if i == 3: break else: print("for loop was not broken") @@ -1324,8 +1349,8 @@ z = {**x, **y} ## [Create user defined exceptions](https://docs.python.org/3/tutorial/errors.html#user-defined-exceptions) Programs may name their own exceptions by creating a new exception class. -Exceptions should typically be derived from the `Exception` class, either directly -or indirectly. +Exceptions should typically be derived from the `Exception` class, either +directly or indirectly. Exception classes are meant to be kept simple, only offering a number of attributes that allow information about the error to be extracted by handlers @@ -1357,19 +1382,19 @@ naming of the standard exceptions. ```python import importlib -module = importlib.import_module('os') +module = importlib.import_module("os") module_class = module.getcwd -relative_module = importlib.import_module('.model', package='mypackage') -class_to_extract = 'MyModel' +relative_module = importlib.import_module(".model", package="mypackage") +class_to_extract = "MyModel" extracted_class = geattr(relative_module, class_to_extract) ``` The first argument specifies what module to import in absolute or relative terms -(e.g. either `pkg.mod` or `..mod`). If the name is specified in relative terms, then -the package argument must be set to the name of the package which is to act as -the anchor for resolving the package name (e.g. `import_module('..mod', -'pkg.subpkg')` will `import pkg.mod`). +(e.g. either `pkg.mod` or `..mod`). If the name is specified in relative terms, +then the package argument must be set to the name of the package which is to act +as the anchor for resolving the package name (e.g. +`import_module('..mod', 'pkg.subpkg')` will `import pkg.mod`). # [Get system's timezone and use it in datetime](https://stackoverflow.com/a/61124241) @@ -1378,6 +1403,7 @@ To obtain timezone information in the form of a `datetime.tzinfo` object, use ```python from dateutil import tz + myTimeZone = tz.tzlocal() ``` @@ -1386,7 +1412,8 @@ This object can be used in the `tz` parameter of `datetime.datetime.now()`: ```python from datetime import datetime from dateutil import tz -localisedDatetime = datetime.now(tz = tz.tzlocal()) + +localisedDatetime = datetime.now(tz=tz.tzlocal()) ``` # [Capitalize a sentence](https://stackoverflow.com/questions/53898070/capitalize-only-the-first-letter-of-sentences-in-python-using-split-function) @@ -1399,8 +1426,8 @@ To change the caps of the first letter of the first word of a sentence use: Add funny Emojis ``` -The `.capitalize` method transforms the rest of words to lowercase. -The `.title` transforms all sentence words to capitalize. +The `.capitalize` method transforms the rest of words to lowercase. The `.title` +transforms all sentence words to capitalize. # [Get the last monday datetime](https://www.pythonprogramming.in/find-the-previous-and-coming-monday-s-date-based-on-current-date.html) @@ -1413,6 +1440,5 @@ last_monday = today - datetime.timedelta(days=today.weekday()) # Issues -* [Pypi won't allow you to upload packages with direct - dependencies](https://github.com/BaderLab/saber/issues/35): update the - section above. +- [Pypi won't allow you to upload packages with direct dependencies](https://github.com/BaderLab/saber/issues/35): + update the section above. diff --git a/docs/fastapi.md b/docs/fastapi.md index 4e8b2d0433d..fd2e415cd99 100644 --- a/docs/fastapi.md +++ b/docs/fastapi.md @@ -10,20 +10,19 @@ hints. 
The [key features](https://fastapi.tiangolo.com/features/) are: -* Fast: Very high performance, on par with NodeJS and Go (thanks to Starlette - and Pydantic). One of the fastest Python frameworks available. -* Fast to code: Increase the speed to develop features by about 200% to 300%. -* Fewer bugs: Reduce about 40% of human (developer) induced errors. -* Intuitive: Great editor support. Completion everywhere. Less time debugging. -* Easy: Designed to be easy to use and learn. Less time reading docs. -* Short: Minimize code duplication. Multiple features from each parameter - declaration. Fewer bugs. -* Robust: Get production-ready code. With automatic interactive documentation. -* Standards-based: Based on (and fully compatible with) the open standards for - APIs: OpenAPI (previously known as Swagger) and JSON Schema. -* [Authentication with - JWT](https://fastapi.tiangolo.com/tutorial/security/first-steps/): with - a super nice tutorial on how to set it up. +- Fast: Very high performance, on par with NodeJS and Go (thanks to Starlette + and Pydantic). One of the fastest Python frameworks available. +- Fast to code: Increase the speed to develop features by about 200% to 300%. +- Fewer bugs: Reduce about 40% of human (developer) induced errors. +- Intuitive: Great editor support. Completion everywhere. Less time debugging. +- Easy: Designed to be easy to use and learn. Less time reading docs. +- Short: Minimize code duplication. Multiple features from each parameter + declaration. Fewer bugs. +- Robust: Get production-ready code. With automatic interactive documentation. +- Standards-based: Based on (and fully compatible with) the open standards for + APIs: OpenAPI (previously known as Swagger) and JSON Schema. +- [Authentication with JWT](https://fastapi.tiangolo.com/tutorial/security/first-steps/): + with a super nice tutorial on how to set it up. # [Installation](https://fastapi.tiangolo.com/#installation) @@ -39,62 +38,67 @@ pip install uvicorn[standard] # [Simple example](https://fastapi.tiangolo.com/#installation) -* Create a file `main.py` with: +- Create a file `main.py` with: - ```python - from typing import Optional + ```python + from typing import Optional - from fastapi import FastAPI + from fastapi import FastAPI - app = FastAPI() + app = FastAPI() - @app.get("/") - def read_root(): - return {"Hello": "World"} + @app.get("/") + def read_root(): + return {"Hello": "World"} - @app.get("/items/{item_id}") - def read_item(item_id: int, q: Optional[str] = None): - return {"item_id": item_id, "q": q} - ``` + @app.get("/items/{item_id}") + def read_item(item_id: int, q: Optional[str] = None): + return {"item_id": item_id, "q": q} + ``` -* Run the server: +- Run the server: - ```bash - uvicorn main:app --reload - ``` + ```bash + uvicorn main:app --reload + ``` -* Open your browser at http://127.0.0.1:8000/items/5?q=somequery. You will see the JSON response as: +- Open your browser at http://127.0.0.1:8000/items/5?q=somequery. You will see + the JSON response as: - ```json - {"item_id": 5, "q": "somequery"} - ``` + ```json + { + "item_id": 5, + "q": "somequery" + } + ``` You already created an API that: -* Receives HTTP requests in the paths `/` and `/items/{item_id}`. -* Both paths take GET operations (also known as HTTP methods). -* The path `/items/{item_id}` has a path parameter `item_id` that should be an - `int`. -* The path `/items/{item_id}` has an optional `str` query parameter `q`. -* Has interactive API docs made for you: - * Swagger: http://127.0.0.1:8000/docs. 
- * Redoc: http://127.0.0.1:8000/redoc. +- Receives HTTP requests in the paths `/` and `/items/{item_id}`. +- Both paths take GET operations (also known as HTTP methods). +- The path `/items/{item_id}` has a path parameter `item_id` that should be an + `int`. +- The path `/items/{item_id}` has an optional `str` query parameter `q`. +- Has interactive API docs made for you: + - Swagger: http://127.0.0.1:8000/docs. + - Redoc: http://127.0.0.1:8000/redoc. -You will see the automatic interactive API documentation (provided by Swagger UI): +You will see the automatic interactive API documentation (provided by Swagger +UI): # Sending data to the server When you need to send data from a client (let's say, a browser) to your API, you have three basic options: -* As [path parameters](#path-parameters) in the URL (`/items/2`). -* As [query parameters](#query-parameters) in the URL (`/items/2?skip=true`). -* In the [body](#body-requests) of a POST request. +- As [path parameters](#path-parameters) in the URL (`/items/2`). +- As [query parameters](#query-parameters) in the URL (`/items/2?skip=true`). +- In the [body](#body-requests) of a POST request. -To send simple data use the first two, to send complex or sensitive data, use the -last. +To send simple data use the first two, to send complex or sensitive data, use +the last. It also supports sending data through [cookies](https://fastapi.tiangolo.com/tutorial/cookie-params/) and @@ -114,8 +118,8 @@ def read_item(item_id: int): If you define the type hints of the function arguments, FastAPI will use [pydantic](pydantic.md) data validation. -If you need to use a Linux path as an argument, check [this -workaround](https://fastapi.tiangolo.com/tutorial/path-params/#path-parameters-containing-paths), +If you need to use a Linux path as an argument, check +[this workaround](https://fastapi.tiangolo.com/tutorial/path-params/#path-parameters-containing-paths), but be aware that it's not supported by OpenAPI. ### [Order matters](https://fastapi.tiangolo.com/tutorial/path-params/#order-matters) @@ -146,11 +150,13 @@ use a standard Python `Enum`. ```python from enum import Enum + class ModelName(str, Enum): alexnet = "alexnet" resnet = "resnet" lenet = "lenet" + @app.get("/models/{model_name}") def get_model(model_name: ModelName): if model_name == ModelName.alexnet: @@ -162,8 +168,8 @@ def get_model(model_name: ModelName): return {"model_name": model_name, "message": "Have some residuals"} ``` -These are the basics, FastAPI supports more complex [path parameters and string -validations](https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/). +These are the basics, FastAPI supports more complex +[path parameters and string validations](https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/). ## [Query Parameters](https://fastapi.tiangolo.com/tutorial/query-params/) @@ -179,13 +185,13 @@ async def read_item(skip: int = 0, limit: int = 10): return fake_items_db[skip : skip + limit] ``` -The query is the set of key-value pairs that go after the `?` in a URL, separated -by `&` characters. +The query is the set of key-value pairs that go after the `?` in a URL, +separated by `&` characters. For example, in the URL: http://127.0.0.1:8000/items/?skip=0&limit=10 -These are the basics, FastAPI supports more complex [query parameters and string -validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/). 
+These are the basics, FastAPI supports more complex +[query parameters and string validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/). ## [Request Body](https://fastapi.tiangolo.com/tutorial/body/) @@ -203,6 +209,7 @@ class Item(BaseModel): price: float tax: Optional[float] = None + @app.post("/items/") async def create_item(item: Item): return item @@ -210,22 +217,20 @@ async def create_item(item: Item): With just that Python type declaration, FastAPI will: -* Read the body of the request as JSON. -* Convert the corresponding types (if needed). -* Validate the data: If the data is invalid, it will return a nice and clear - error, indicating exactly where and what was the incorrect data. -* Give you the received data in the parameter `item`. -* Generate JSON Schema definitions for your model. -* Those schemas will be part of the generated OpenAPI schema, and used by the - automatic documentation UIs. +- Read the body of the request as JSON. +- Convert the corresponding types (if needed). +- Validate the data: If the data is invalid, it will return a nice and clear + error, indicating exactly where and what was the incorrect data. +- Give you the received data in the parameter `item`. +- Generate JSON Schema definitions for your model. +- Those schemas will be part of the generated OpenAPI schema, and used by the + automatic documentation UIs. These are the basics, FastAPI supports more complex patterns such as: -* [Using multiple models in the same - query](https://fastapi.tiangolo.com/tutorial/body-multiple-params/). -* [Additional validations of the pydantic - models](https://fastapi.tiangolo.com/tutorial/body-fields/). -* [Nested models](https://fastapi.tiangolo.com/tutorial/body-nested-models/). +- [Using multiple models in the same query](https://fastapi.tiangolo.com/tutorial/body-multiple-params/). +- [Additional validations of the pydantic models](https://fastapi.tiangolo.com/tutorial/body-fields/). +- [Nested models](https://fastapi.tiangolo.com/tutorial/body-nested-models/). # [Sending data to the client](https://fastapi.tiangolo.com/advanced/response-directly/) @@ -345,76 +350,78 @@ In many cases your application could need some external settings or configurations, for example secret keys, database credentials, credentials for email services, etc. -You can load these configurations through [environmental -variables](https://fastapi.tiangolo.com/advanced/settings/#environment-variables), -or you can use the awesome [Pydantic settings -management](https://pydantic-docs.helpmanual.io/usage/settings/), whose -advantages are: +You can load these configurations through +[environmental variables](https://fastapi.tiangolo.com/advanced/settings/#environment-variables), +or you can use the awesome +[Pydantic settings management](https://pydantic-docs.helpmanual.io/usage/settings/), +whose advantages are: -* Do Pydantic's type validation on the fields. -* [Automatically reads the missing values from environmental variables](https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names). -* Supports reading variables from [Dotenv - files](https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support). -* [Supports - secrets](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support). +- Do Pydantic's type validation on the fields. +- [Automatically reads the missing values from environmental variables](https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names). 
+- Supports reading variables from + [Dotenv files](https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support). +- [Supports secrets](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support). First you define the `Settings` class with all the fields: -!!! note "File: `config.py`" - ```python - from pydantic import BaseSettings +File: `config.py`: + +```python +from pydantic import BaseSettings - class Settings(BaseSettings): - verbose: bool = True - database_url: str = "tinydb://~/.local/share/pyscrobbler/database.tinydb" - ``` +class Settings(BaseSettings): + verbose: bool = True + database_url: str = "tinydb://~/.local/share/pyscrobbler/database.tinydb" +``` -Then in the api definition, [set the -dependency](https://fastapi.tiangolo.com/advanced/settings/#settings-in-a-dependency). +Then in the api definition, +[set the dependency](https://fastapi.tiangolo.com/advanced/settings/#settings-in-a-dependency). -!!! note "File: `api.py`" +File: `api.py`: - ```python - from functools import lru_cache - from fastapi import Depends, FastAPI +```python +from functools import lru_cache +from fastapi import Depends, FastAPI - app = FastAPI() +app = FastAPI() - @lru_cache() - def get_settings() -> Settings: - """Configure the program settings.""" - return Settings() - @app.get("/verbose") - def verbose(settings: Settings = Depends(get_settings)) -> bool: - return settings.verbose - ``` +@lru_cache() +def get_settings() -> Settings: + """Configure the program settings.""" + return Settings() + + +@app.get("/verbose") +def verbose(settings: Settings = Depends(get_settings)) -> bool: + return settings.verbose +``` Where: -* `get_settings` is the dependency function that configures the `Settings` - object. The endpoint `verbose` is [dependant of - `get_settings`](https://fastapi.tiangolo.com/tutorial/dependencies/). -* [The `@lru_cache` - decorator](https://fastapi.tiangolo.com/advanced/settings/#creating-the-settings-only-once-with-lru_cache) - changes the function it decorates to return the same value that was - returned the first time, instead of computing it again, executing the code - of the function every time. +- `get_settings` is the dependency function that configures the `Settings` + object. The endpoint `verbose` is + [dependant of `get_settings`](https://fastapi.tiangolo.com/tutorial/dependencies/). - So, the function will be executed once for each combination of arguments. - And then the values returned by each of those combinations of arguments will - be used again and again whenever the function is called with exactly the - same combination of arguments. +- [The `@lru_cache` decorator](https://fastapi.tiangolo.com/advanced/settings/#creating-the-settings-only-once-with-lru_cache) + changes the function it decorates to return the same value that was returned + the first time, instead of computing it again, executing the code of the + function every time. - Creating the `Settings` object is a costly operation as it needs to check - the environment variables or read a file, so we want to do it just once, not - on each request. + So, the function will be executed once for each combination of arguments. And + then the values returned by each of those combinations of arguments will be + used again and again whenever the function is called with exactly the same + combination of arguments. -This setup makes it easy to [inject testing -configuration](#inject-testing-configuration) so as not to break production -code. 
+ Creating the `Settings` object is a costly operation as it needs to check the + environment variables or read a file, so we want to do it just once, not on + each request. + +This setup makes it easy to +[inject testing configuration](#inject-testing-configuration) so as not to break +production code. ## OpenAPI configuration @@ -486,7 +493,6 @@ tags_metadata = [ ] ``` - app = FastAPI(openapi_tags=tags_metadata) ### [Add a summary and description](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#summary-and-description) @@ -529,30 +535,32 @@ async def read_elements(): # [Deploy with Docker](https://fastapi.tiangolo.com/deployment/docker/). -FastAPI has it's own -optimized [docker](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker), -which makes the deployment of your applications really easy. +FastAPI has it's own optimized +[docker](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker), which +makes the deployment of your applications really easy. -* In your project directory create the `Dockerfile` file: +- In your project directory create the `Dockerfile` file: - ```dockerfile - FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7 + ```dockerfile + FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7 - COPY ./app /app - ``` + COPY ./app /app + ``` -* Go to the project directory (in where your Dockerfile is, containing your app directory). -* Build your FastAPI image: +- Go to the project directory (in where your Dockerfile is, containing your app + directory). - ```bash - docker build -t myimage . - ``` +- Build your FastAPI image: -* Run a container based on your image: + ```bash + docker build -t myimage . + ``` - ```bash - docker run -d --name mycontainer -p 80:80 myimage - ``` +- Run a container based on your image: + + ```bash + docker run -d --name mycontainer -p 80:80 myimage + ``` Now you have an optimized FastAPI server in a Docker container. Auto-tuned for your current server (and number of CPU cores). @@ -579,20 +587,22 @@ will be defined in the `app` variable in the `src/program_name/entrypoints/api.py` file. To make things simpler make the `app` variable available on the root of your -package, so you can do `from program_name import app` instead of `from -program_name.entrypoints.api import app`. To do that we need to add `app` to the -`__all__` internal python variable of the `__init__.py` file of our package. +package, so you can do `from program_name import app` instead of +`from program_name.entrypoints.api import app`. To do that we need to add `app` +to the `__all__` internal python variable of the `__init__.py` file of our +package. + +File: `src/program_name/__init__.py`: -!!! 
note "File: src/program_name/__init__.py" - ```python - from .entrypoints.api import app +```python +from .entrypoints.ap +import app - __all__: List[str] = ['app'] - ``` +__all__: List[str] = ['app'] +``` -The image is -configured through [environmental -variables](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker#environment-variables) +The image is configured through +[environmental variables](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker#environment-variables) So we will need to use: @@ -627,6 +637,7 @@ def client_() -> TestClient: """Configure FastAPI TestClient.""" return TestClient(app) + def test_read_main(client: TestClient): response = client.get("/") assert response.status_code == 200 @@ -645,23 +656,23 @@ result = client.post( ## Inject testing configuration -If your application follows the [application configuration -section](#application-configuration), injecting testing configuration is easy -with [dependency -injection](https://fastapi.tiangolo.com/advanced/testing-dependencies/). +If your application follows the +[application configuration section](#application-configuration), injecting +testing configuration is easy with +[dependency injection](https://fastapi.tiangolo.com/advanced/testing-dependencies/). Imagine you have a `db_tinydb` [fixture](pytest.md#fixtures) that sets up the testing database: ```python @pytest.fixture(name="db_tinydb") -def db_tinydb_(tmpdir: LocalPath) -> str: +def db_tinydb_(tmp_path: Path) -> str: """Create an TinyDB database engine. Returns: database_url: Url used to connect to the database. """ - tinydb_file_path = str(tmpdir.join("tinydb.db")) + tinydb_file_path = str(tmp_path / "tinydb.db") return f"tinydb:///{tinydb_file_path}" ``` @@ -689,6 +700,7 @@ attribute, you could try to do: ```python app = FastAPI() + @lru_cache() def get_config() -> Config: """Configure the program settings.""" @@ -696,6 +708,7 @@ def get_config() -> Config: log.info("Loading the config") return Config() # pragma: no cover + if get_config().environment == "testing": @app.get("/seed", status_code=201) @@ -721,7 +734,6 @@ But the injection of the dependencies is only done inside the functions, so that check inside the endpoint, which is not ideal. ```python - @app.get("/seed", status_code=201) def seed_data( config: Config = Depends(get_config), @@ -745,7 +757,8 @@ def seed_data( ## [Create redirections](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse) -Returns an HTTP redirect. Uses a 307 status code (Temporary Redirect) by default. +Returns an HTTP redirect. Uses a 307 status code (Temporary Redirect) by +default. ```python from fastapi import FastAPI @@ -759,7 +772,6 @@ async def read_typer(): return RedirectResponse("https://typer.tiangolo.com") ``` - ## Test that your application works locally Once you have your application [built](#deploy-with-docker) and @@ -773,11 +785,9 @@ Instead, launch an uvicorn application directly with: uvicorn program_name:app --reload ``` -!!! note "" - - The command is assuming that your `app` is available at the root of your - package, look at the [deploy section](#other-project-structures) if you feel - lost. +Note: The command is assuming that your `app` is available at the root of your +package, look at the [deploy section](#other-project-structures) if you feel +lost. ## Resolve the 307 error @@ -809,35 +819,35 @@ The error is telling us that the required `url` parameter is missing. 
# Logging -By default the [application log messages are not shown in the uvicorn -log](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker/issues/19), you -need to add the next lines to the file where your app is defined: +By default the +[application log messages are not shown in the uvicorn log](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker/issues/19), +you need to add the next lines to the file where your app is defined: -!!! note "File: src/program_name/entrypoints/api.py" +File: `src/program_name/entrypoints/api.py`: - ```python - from fastapi import FastAPI - from fastapi.logger import logger - import logging +```python +from fastapi import FastAPI +from fastapi.logger import logger +import logging - log = logging.getLogger("gunicorn.error") - logger.handlers = log.handlers - if __name__ != "main": - logger.setLevel(log.level) - else: - logger.setLevel(logging.DEBUG) +log = logging.getLogger("gunicorn.error") +logger.handlers = log.handlers +if __name__ != "main": + logger.setLevel(log.level) +else: + logger.setLevel(logging.DEBUG) - app = FastAPI() +app = FastAPI() - # rest of the application... - ``` +# rest of the application... +``` ## Logging to Sentry -FastAPI can [integrate with -Sentry](https://philstories.medium.com/integrate-sentry-to-fastapi-7250603c070f) -or similar [application loggers](python_logging.md) through the [ASGI -middleware](https://fastapi.tiangolo.com/advanced/middleware/#other-middlewares). +FastAPI can +[integrate with Sentry](https://philstories.medium.com/integrate-sentry-to-fastapi-7250603c070f) +or similar [application loggers](python_logging.md) through the +[ASGI middleware](https://fastapi.tiangolo.com/advanced/middleware/#other-middlewares). # [Run a FastAPI server in the background for testing purposes](https://stackoverflow.com/questions/57412825/how-to-start-a-uvicorn-fastapi-in-background-when-testing-with-pytest) @@ -845,80 +855,79 @@ Sometimes you want to launch a web server with a simple API to test a program that can't use the [testing client](#testing). First define the API to launch with: -!!! note "File: tests/api_server.py" +File: `tests/api_server.py`: - ```python - from fastapi import FastAPI, HTTPException +```python +from fastapi import FastAPI, HTTPException - app = FastAPI() +app = FastAPI() - @app.get("/existent") - async def existent(): - return {"msg": "exists!"} +@app.get("/existent") +async def existent(): + return {"msg": "exists!"} - @app.get("/inexistent") - async def inexistent(): - raise HTTPException(status_code=404, detail="It doesn't exist") - ``` +@app.get("/inexistent") +async def inexistent(): + raise HTTPException(status_code=404, detail="It doesn't exist") +``` Then create the fixture: -!!! 
note "File: tests/conftest.py" - ```python - from multiprocessing import Process +File: `tests/conftest.py`: - from typing import Generator - import pytest - import uvicorn +```python +from multiprocessing import Process - from .api_server import app +from typing import Generator +import pytest +import uvicorn +from .api_server import app - def run_server() -> None: - """Command to run the fake api server.""" - uvicorn.run(app) +def run_server() -> None: + """Command to run the fake api server.""" + uvicorn.run(app) - @pytest.fixture() - def _server() -> Generator[None, None, None]: - """Start the fake api server.""" - proc = Process(target=run_server, args=(), daemon=True) - proc.start() - yield - proc.kill() # Cleanup after test - ``` + +@pytest.fixture() +def _server() -> Generator[None, None, None]: + """Start the fake api server.""" + proc = Process(target=run_server, args=(), daemon=True) + proc.start() + yield + proc.kill() # Cleanup after test +``` Now you can use the `server: None` fixture in your tests and run your queries against `http://localhost:8000`. # Interesting features to explore -* [Structure big - applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). -* [Dependency injection](https://fastapi.tiangolo.com/tutorial/dependencies/). -* [Running background tasks after the request is - finished](https://fastapi.tiangolo.com/tutorial/background-tasks/). -* [Return a different response - model](https://fastapi.tiangolo.com/tutorial/response-model/). -* [Upload files](https://fastapi.tiangolo.com/tutorial/request-files/). -* [Set - authentication](https://fastapi.tiangolo.com/tutorial/security/first-steps/). -* [Host behind a proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/). -* [Static files](https://fastapi.tiangolo.com/tutorial/static-files/). +- [Structure big applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). +- [Dependency injection](https://fastapi.tiangolo.com/tutorial/dependencies/). +- [Running background tasks after the request is finished](https://fastapi.tiangolo.com/tutorial/background-tasks/). +- [Return a different response model](https://fastapi.tiangolo.com/tutorial/response-model/). +- [Upload files](https://fastapi.tiangolo.com/tutorial/request-files/). +- [Set authentication](https://fastapi.tiangolo.com/tutorial/security/first-steps/). +- [Host behind a proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/). +- [Static files](https://fastapi.tiangolo.com/tutorial/static-files/). # Issues -* [FastAPI does not log - messages](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker/issues/19): - update `pyscrobbler` and any other maintained applications and remove the - snippet defined in the [logging section](#logging). +- [FastAPI does not log messages](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker/issues/19): + update `pyscrobbler` and any other maintained applications and remove the + snippet defined in the [logging section](#logging). + # References -* [Docs](https://fastapi.tiangolo.com/) -* [Git](https://github.com/tiangolo/fastapi) -* [Awesome FastAPI](https://github.com/mjhea0/awesome-fastapi) +- [Docs](https://fastapi.tiangolo.com/) + +- [Git](https://github.com/tiangolo/fastapi) + +- [Awesome FastAPI](https://github.com/mjhea0/awesome-fastapi) -* [Testdriven.io course](https://testdriven.io/courses/tdd-fastapi/): suggested - by the developer. +- [Testdriven.io course](https://testdriven.io/courses/tdd-fastapi/): suggested + by the developer. 
diff --git a/docs/goodconf.md b/docs/goodconf.md index b2b6cc6dc43..7400ab1e35d 100644 --- a/docs/goodconf.md +++ b/docs/goodconf.md @@ -25,17 +25,26 @@ import os from goodconf import GoodConf, Field from pydantic import PostgresDsn + class AppConfig(GoodConf): """Configure my application.""" - DEBUG: bool - DATABASE_URL: PostgresDsn = "postgres://localhost:5432/mydb" - SECRET_KEY: str = Field( + + debug: bool + database_url: PostgresDsn = "postgres://localhost:5432/mydb" + secret_key: str = Field( initial=lambda: base64.b64encode(os.urandom(60)).decode(), description="Used for cryptographic signing. " - "https://docs.djangoproject.com/en/2.0/ref/settings/#secret-key") + "https://docs.djangoproject.com/en/2.0/ref/settings/#secret-key", + ) class Config: - default_files = ["/etc/myproject/myproject.yaml", "myproject.yaml"] + """Define the default files to check.""" + + default_files = [ + os.path.expanduser("~/.local/share/your_program/config.yaml"), + "config.yaml", + ] + config = AppConfig() ``` @@ -48,11 +57,9 @@ configuration files. For more details see Pydantic's docs for examples of loading: -* [Dotenv (.env) - files](https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support). -* [Docker - secrets](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support). +- [Dotenv (.env) files](https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support). +- [Docker secrets](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support). # References -* [Git](https://github.com/lincolnloop/goodconf/) +- [Git](https://github.com/lincolnloop/goodconf/) diff --git a/docs/linux/elasticsearch.md b/docs/linux/elasticsearch.md index b6c6a1522b3..3ee1533425c 100644 --- a/docs/linux/elasticsearch.md +++ b/docs/linux/elasticsearch.md @@ -13,15 +13,26 @@ the ElasticSearch documentation uses cURL command line syntax. This is also the standard practice to describe requests made to ElasticSearch within the user community. +## Get all documents + An example HTTP request using CURL syntax looks like this: ```bash -curl -XPOST "https://localhost:9200/_search" -d' { "query": { "match_all": {} } -}' +curl \ + -H 'Content-Type: application/json' \ + -XPOST "https://localhost:9200/_search" \ + -d' { "query": { "match_all": {} }}' +``` + +## Get documents that match a string + +```bash +curl \ + -H 'Content-Type: application/json' \ + -XPOST "https://localhost:9200/_search" \ + -d' { "query": { "query_string": {"query": "test company"} }}' ``` -The above snippet, when executed in a console, runs the curl program with three -arguments. # Backup diff --git a/docs/linux/mkdocs.md b/docs/linux/mkdocs.md index 499e794d215..965b29380e3 100644 --- a/docs/linux/mkdocs.md +++ b/docs/linux/mkdocs.md @@ -9,112 +9,112 @@ static site generator that's geared towards building project documentation. Documentation source files are written in Markdown, and configured with a single YAML configuration file. -!!! note "" - I've automated the creation of the mkdocs site in [this cookiecutter - template](https://github.com/lyz-code/cookiecutter-python-project). +Note: I've automated the creation of the mkdocs site in +[this cookiecutter template](https://github.com/lyz-code/cookiecutter-python-project). # Installation -* Install the basic packages. - - ```bash - pip install \ - mkdocs \ - mkdocs-material \ - mkdocs-autolink-plugin \ - mkdocs-minify-plugin \ - pymdown-extensions \ - mkdocs-git-revision-date-localized-plugin - ``` - -* Create the `docs` repository. 
- - ```bash - mkdocs new docs - ``` - -* Although there are [several - themes](https://www.mkdocs.org/user-guide/styling-your-docs/), I usually use - the [material](https://squidfunk.github.io/mkdocs-material) one. I won't - dive into the different options, just show a working template of the - `mkdocs.yaml` file. - - ```yaml - site_name: {{ site_name }} - site_author: {{ your_name }} - site_url: {{ site_url }} - nav: - - Introduction: 'index.md' - - Basic Usage: 'basic_usage.md' - - Configuration: 'configuration.md' - - Update: 'update.md' - - Advanced Usage: - - Projects: "projects.md" - - Tags: "tags.md" - - plugins: - - search - - autolinks - - git-revision-date-localized: - type: timeago - - minify: - minify_html: true - - markdown_extensions: - - admonition - - meta - - toc: - permalink: true - baselevel: 2 - - pymdownx.arithmatex - - pymdownx.betterem: - smart_enable: all - - pymdownx.caret - - pymdownx.critic - - pymdownx.details - - pymdownx.emoji: - emoji_generator: !!python/name:pymdownx.emoji.to_svg - - pymdownx.inlinehilite - - pymdownx.magiclink - - pymdownx.mark - - pymdownx.smartsymbols - - pymdownx.superfences - - pymdownx.tasklist: - custom_checkbox: true - - pymdownx.tilde - - theme: - name: material - custom_dir: "theme" - logo: "images/logo.png" - palette: - primary: 'blue grey' - accent: 'light blue' - - extra_css: - - 'stylesheets/extra.css' - - 'stylesheets/links.css' - - repo_name: {{ repository_name }} # for example: 'lyz-code/pydo' - repo_url: {{ repository_url }} # for example: 'https://github.com/lyz-code/pydo' - ``` - -* [Configure your - logo](https://squidfunk.github.io/mkdocs-material/getting-started/#logo) by - saving it into `docs/images/logo.png`. - -* I like to show a small image above each link so you know where is it pointing - to. To do so add the content of [this - directory](https://github.com/lyz-code/pydo/tree/master/docs/theme) to - `theme`. and - [these](https://github.com/lyz-code/pydo/tree/master/docs/docs/stylesheets) - files under `docs/stylesheets`. -* Initialize the git repository and create the first commit. -* Start the server to see everything is alright. - - ```bash - mkdocs serve - ``` +- Install the basic packages. + + ```bash + pip install \ + mkdocs \ + mkdocs-material \ + mkdocs-autolink-plugin \ + mkdocs-minify-plugin \ + pymdown-extensions \ + mkdocs-git-revision-date-localized-plugin + ``` + +- Create the `docs` repository. + + ```bash + mkdocs new docs + ``` + +- Although there are + [several themes](https://www.mkdocs.org/user-guide/styling-your-docs/), I + usually use the [material](https://squidfunk.github.io/mkdocs-material) one. I + won't dive into the different options, just show a working template of the + `mkdocs.yaml` file. 
+ + ```yaml + site_name: {{site_name: null}: null} + site_author: {{your_name: null}: null} + site_url: {{site_url: null}: null} + nav: + - Introduction: index.md + - Basic Usage: basic_usage.md + - Configuration: configuration.md + - Update: update.md + - Advanced Usage: + - Projects: projects.md + - Tags: tags.md + + plugins: + - search + - autolinks + - git-revision-date-localized: + type: timeago + - minify: + minify_html: true + + markdown_extensions: + - admonition + - meta + - toc: + permalink: true + baselevel: 2 + - pymdownx.arithmatex + - pymdownx.betterem: + smart_enable: all + - pymdownx.caret + - pymdownx.critic + - pymdownx.details + - pymdownx.emoji: + emoji_generator: !%21python/name:pymdownx.emoji.to_svg + - pymdownx.inlinehilite + - pymdownx.magiclink + - pymdownx.mark + - pymdownx.smartsymbols + - pymdownx.superfences + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.tilde + + theme: + name: material + custom_dir: theme + logo: images/logo.png + palette: + primary: blue grey + accent: light blue + + extra_css: + - stylesheets/extra.css + - stylesheets/links.css + + repo_name: {{repository_name: null}: null} # for example: 'lyz-code/pydo' + repo_url: {{repository_url: null}: null} # for example: 'https://github.com/lyz-code/pydo' + ``` + +- [Configure your logo](https://squidfunk.github.io/mkdocs-material/getting-started/#logo) + by saving it into `docs/images/logo.png`. + +- I like to show a small image above each link so you know where is it pointing + to. To do so add the content of + [this directory](https://github.com/lyz-code/pydo/tree/master/docs/theme) to + `theme`. and + [these](https://github.com/lyz-code/pydo/tree/master/docs/docs/stylesheets) + files under `docs/stylesheets`. + +- Initialize the git repository and create the first commit. + +- Start the server to see everything is alright. + + ```bash + mkdocs serve + ``` ## Material theme customizations @@ -130,7 +130,7 @@ theme: palette: # Light mode - - media: "(prefers-color-scheme: light)" + - media: '(prefers-color-scheme: light)' scheme: default primary: blue grey accent: light blue @@ -139,7 +139,7 @@ theme: name: Switch to dark mode # Dark mode - - media: "(prefers-color-scheme: dark)" + - media: '(prefers-color-scheme: dark)' scheme: slate primary: blue grey accent: light blue @@ -164,73 +164,76 @@ theme: ## Add a github pages hook. -* Save your `requirements.txt`. - - ```bash - pip freeze > requirements.txt - ``` - -* Create the `.github/workflows/gh-pages.yml` file with the following contents. - - ```yaml - name: Github pages - - on: - push: - branches: - - master - - jobs: - deploy: - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v2 - with: - # Number of commits to fetch. 0 indicates all history. 
- # Default: 1 - fetch-depth: 0 - - - name: Setup Python - uses: actions/setup-python@v1 - with: - python-version: '3.7' - architecture: 'x64' - - - name: Cache dependencies - uses: actions/cache@v1 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Install dependencies - run: | - python3 -m pip install --upgrade pip - python3 -m pip install -r ./requirements.txt - - - run: | - cd docs - mkdocs build - - - name: Deploy - uses: peaceiris/actions-gh-pages@v3 - with: - deploy_key: ${{ secrets.ACTIONS_DEPLOY_KEY }} - publish_dir: ./docs/site - ``` - -* Create an [SSH deploy key](https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-create-ssh-deploy-key) -* Activate `GitHub Pages` repository configuration with `gh-pages branch`. -* Make a new commit and push to check it's working. +- Save your `requirements.txt`. + + ```bash + pip freeze > requirements.txt + ``` + +- Create the `.github/workflows/gh-pages.yml` file with the following contents. + + ```yaml + name: Github pages + + on: + push: + branches: + - master + + jobs: + deploy: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v2 + with: + # Number of commits to fetch. 0 indicates all history. + # Default: 1 + fetch-depth: 0 + + - name: Setup Python + uses: actions/setup-python@v1 + with: + python-version: '3.7' + architecture: x64 + + - name: Cache dependencies + uses: actions/cache@v1 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + python3 -m pip install -r ./requirements.txt + + - run: | + cd docs + mkdocs build + + - name: Deploy + uses: peaceiris/actions-gh-pages@v3 + with: + deploy_key: ${{ secrets.ACTIONS_DEPLOY_KEY }} + publish_dir: ./docs/site + ``` + +- Create an + [SSH deploy key](https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-create-ssh-deploy-key) + +- Activate `GitHub Pages` repository configuration with `gh-pages branch`. + +- Make a new commit and push to check it's working. ## Create MermaidJS diagrams -Even though the Material theme [supports mermaid -diagrams](https://squidfunk.github.io/mkdocs-material/reference/diagrams/#fn:2) -it's only giving it for the paid users. The [funding needs to reach -5000$](https://squidfunk.github.io/mkdocs-material/insiders/#funding) so it's -released to the general public. +Even though the Material theme +[supports mermaid diagrams](https://squidfunk.github.io/mkdocs-material/reference/diagrams/#fn:2) +it's only giving it for the paid users. The +[funding needs to reach 5000$](https://squidfunk.github.io/mkdocs-material/insiders/#funding) +so it's released to the general public. The alternative is to use the [mkdocs-mermaid2-plugin](https://github.com/fralau/mkdocs-mermaid2-plugin) @@ -239,40 +242,40 @@ dark mode. To [install it](https://github.com/fralau/mkdocs-mermaid2-plugin#installation): -* Download the package: `pip install mkdocs-mermaid2-plugin`. -* Enable the plugin in `mkdocs.yml`. - - ```yaml - plugins: - # Not compatible with mermaid2 - # - minify: - # minify_html: true - - mermaid2: - arguments: - securityLevel: 'loose' - markdown_extensions: - - pymdownx.superfences: - # make exceptions to highlighting of code: - custom_fences: - - name: mermaid - class: mermaid - format: !!python/name:mermaid2.fence_mermaid - ``` +- Download the package: `pip install mkdocs-mermaid2-plugin`. 
+ +- Enable the plugin in `mkdocs.yml`. + + ```yaml + plugins: + # Not compatible with mermaid2 + # - minify: + # minify_html: true + - mermaid2: + arguments: + securityLevel: loose + markdown_extensions: + - pymdownx.superfences: + # make exceptions to highlighting of code: + custom_fences: + - name: mermaid + class: mermaid + format: !%21python/name:mermaid2.fence_mermaid + ``` Check the [MermaidJS](mermaidjs.md) article to see how to create the diagrams. # [Plugin development](https://www.mkdocs.org/user-guide/plugins/) -Like MkDocs, plugins must be written in Python. It is expected that -each plugin would be distributed as a separate Python module. At a minimum, -a MkDocs Plugin must consist of -a [BasePlugin](https://www.mkdocs.org/user-guide/plugins/#baseplugin) subclass -and an [entry point](https://www.mkdocs.org/user-guide/plugins/#entry-point) which +Like MkDocs, plugins must be written in Python. It is expected that each plugin +would be distributed as a separate Python module. At a minimum, a MkDocs Plugin +must consist of a +[BasePlugin](https://www.mkdocs.org/user-guide/plugins/#baseplugin) subclass and +an [entry point](https://www.mkdocs.org/user-guide/plugins/#entry-point) which points to it. The BasePlugin class is meant to have `on_` methods that run actions -on the MkDocs defined -[events](#events). +on the MkDocs defined [events](#events). The same object is called at the different events, so you can save objects from one event to the other in the object attributes. @@ -288,11 +291,12 @@ the list of the `mkdocs.yml` file where they are defined. the `._files` attribute and allows you to `append` files to the collection. As well as extracting the different file types: -* `documentation_pages`: Iterable of markdown page file objects. -* `static_pages`: Iterable of static page file objects. -* `media_files`: Iterable of all files that are not documentation or static pages. -* `javascript_files`: Iterable of javascript files. -* `css_files`: Iterable of css files. +- `documentation_pages`: Iterable of markdown page file objects. +- `static_pages`: Iterable of static page file objects. +- `media_files`: Iterable of all files that are not documentation or static + pages. +- `javascript_files`: Iterable of javascript files. +- `css_files`: Iterable of css files. It is initialized with a list of [`File`](#file) objects. @@ -302,25 +306,25 @@ It is initialized with a list of [`File`](#file) objects. objects points to the source and destination locations of a file. It has the following interesting attributes: -* `name`: Name of the file without the extension. -* `src_path` or `abs_src_path`: Relative or absolute path to the original path, - for example the markdown file. -* `dest_path` or `abs_dest_path`: Relative or absolute path to the destination path, - for example the html file generated from the markdown one. -* `url`: Url where the file is going to be exposed. +- `name`: Name of the file without the extension. +- `src_path` or `abs_src_path`: Relative or absolute path to the original path, + for example the markdown file. +- `dest_path` or `abs_dest_path`: Relative or absolute path to the destination + path, for example the html file generated from the markdown one. +- `url`: Url where the file is going to be exposed. It is initialized with the arguments: -* `path`: Must be a path that exists relative to `src_dir`. -* `src_dir`: Absolute path on the local file system to the directory where the - docs are. 
-* `dest_dir`: Absolute path on the local file system to the directory where the - site is going to be built. -* `use_directory_urls`: If `False`, a Markdown file is mapped to an HTML file of - the same name (the file extension is changed to `.html`). If True, - a Markdown file is mapped to an HTML index file (`index.html`) nested in - a directory using the "name" of the file in `path`. The `use_directory_urls` - argument has no effect on non-Markdown files. By default MkDocs uses `True`. +- `path`: Must be a path that exists relative to `src_dir`. +- `src_dir`: Absolute path on the local file system to the directory where the + docs are. +- `dest_dir`: Absolute path on the local file system to the directory where the + site is going to be built. +- `use_directory_urls`: If `False`, a Markdown file is mapped to an HTML file of + the same name (the file extension is changed to `.html`). If True, a Markdown + file is mapped to an HTML index file (`index.html`) nested in a directory + using the "name" of the file in `path`. The `use_directory_urls` argument has + no effect on non-Markdown files. By default MkDocs uses `True`. ### [Navigation](https://github.com/mkdocs/mkdocs/blob/master/mkdocs/structure/nav.py#L11) @@ -328,8 +332,9 @@ It is initialized with the arguments: objects hold the information to build the navigation of the site. It has the following interesting attributes: -* `items`: Nested List with full navigation of Sections, SectionPages, Pages, and Links. -* `pages`: Flat List of subset of Pages in nav, in order. +- `items`: Nested List with full navigation of Sections, SectionPages, Pages, + and Links. +- `pages`: Flat List of subset of Pages in nav, in order. The `Navigation` object has no `__eq__` method, so when testing, instead of trying to build a similar `Navigation` object and compare them, you need to @@ -348,11 +353,12 @@ and the MkDocs `config` object. [`mkdocs.structure.nav.Section`](https://github.com/mkdocs/mkdocs/blob/master/mkdocs/structure/nav.py#L32) object models a section of the navigation of a MkDocs site. -To initialize it you need the `title` of the section and the `children` which are -the elements that belong to the section. If you don't yet know the children, +To initialize it you need the `title` of the section and the `children` which +are the elements that belong to the section. If you don't yet know the children, pass an empty list `[]`. ### [SectionPage](https://github.com/oprypin/mkdocs-section-index/blob/master/mkdocs_section_index/__init__.py#L11) + [`mkdocs_section_index.SectionPage`](https://github.com/oprypin/mkdocs-section-index/blob/master/mkdocs_section_index/__init__.py#L11) , part of the [mkdocs-section-index](https://github.com/oprypin/mkdocs-section-index/) plugin, @@ -360,10 +366,10 @@ models [Section](#section) objects that have an associated [Page](#page), allowing you to have nav sections that when clicked, load the Page and not only opens the menu for the children elements. -To initialize it you need the `title` of the section, the [`File`](#file) object of the page, -, the MkDocs `config` object, and the `children` which are the elements that -belong to the section. If you don't yet know the children, pass an empty list -`[]`. +To initialize it you need the `title` of the section, the [`File`](#file) object +of the page, , the MkDocs `config` object, and the `children` which are the +elements that belong to the section. If you don't yet know the children, pass an +empty list `[]`. 
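As a rough sketch of how these objects fit together (the titles and paths are
invented, and it assumes a valid `mkdocs.yml` in the working directory to load
the `config` object from):

```python
from mkdocs.config import load_config
from mkdocs.structure.files import File
from mkdocs.structure.nav import Section
from mkdocs.structure.pages import Page

# The global config object, normally received in the plugin events.
config = load_config("mkdocs.yml")

# Source and destination directories are absolute paths on the local filesystem.
file = File(
    path="new_article.md",
    src_dir="/project/docs",
    dest_dir="/project/site",
    use_directory_urls=True,
)

# A Page needs its title, the File object and the MkDocs config.
page = Page("New article", file, config)

# A Section only needs a title and its children; use [] if you don't know them yet.
section = Section("New section", children=[page])
```

The `SectionPage` variant takes the same `title`, `file` and `config` plus the
`children` list.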
## [Events](https://www.mkdocs.org/user-guide/plugins/#events) @@ -375,12 +381,11 @@ should be made here. Parameters: -* `config`: global configuration object +- `config`: global configuration object Returns: -* global configuration object - +- global configuration object ### [on_files](https://www.mkdocs.org/user-guide/plugins/#on_files) @@ -391,78 +396,78 @@ collection. Use Page Events to manipulate page specific data. Parameters: -* `files`: global [files collection](#files) -* `config`: global configuration object +- `files`: global [files collection](#files) +- `config`: global configuration object Returns: -* global [files collection](#files) +- global [files collection](#files) ### [on_nav](https://www.mkdocs.org/user-guide/plugins/#on_nav) The `nav` event is called after the site navigation is created and can be used to alter the site navigation. -!!! warning "" - - Read the following section if you want to [add new files](#adding-new-files). +Warning: Read the following section if you want to +[add new files](#adding-new-files). Parameters: -* `nav`: global [navigation object](#navigation). -* `config`: global configuration object. -* `files`: global [files collection](#files). +- `nav`: global [navigation object](#navigation). +- `config`: global configuration object. +- `files`: global [files collection](#files). Returns: -* global navigation object +- global navigation object ## Adding new files -!!! note "TL;DR: Add them in the `on_config` event." +Note: "TL;DR: Add them in the `on_config` event." To add new files to the repository you will need two phases: -* Create the markdown article pages. -* Add them to the navigation. +- Create the markdown article pages. +- Add them to the navigation. My first idea as a MkDocs user, and newborn plugin developer was to add the navigation items to the `nav` key in the `config` object, as it's more easy to add items to a dictionary I'm used to work with than to dive into the code and -understand how MkDocs creates the navigation. As I understood from the -docs, the files should be created in the `on_files` event. the problem with this -approach is that the only event that allows you to change the `config` is the -`on_config` event, which is before the `on_files` one, so you can't build the -navigation this way after you've created the files. +understand how MkDocs creates the navigation. As I understood from the docs, the +files should be created in the `on_files` event. the problem with this approach +is that the only event that allows you to change the `config` is the `on_config` +event, which is before the `on_files` one, so you can't build the navigation +this way after you've created the files. Next idea was to add the items in the `on_nav` event, that means creating yourself the [`Section`](#section), [`Pages`](#page), [`SectionPages`](#sectionpage) or `Link` objects and append them to the -`nav.items`. [The problem](https://github.com/mkdocs/mkdocs/issues/2324) is that MkDocs initializes and processes the -`Navigation` object in the +`nav.items`. [The problem](https://github.com/mkdocs/mkdocs/issues/2324) is that +MkDocs initializes and processes the `Navigation` object in the [`get_navigation`](https://github.com/mkdocs/mkdocs/blob/master/mkdocs/structure/nav.py#L99) function. If you want to add items with a plugin in the `on_nav` event, you need to manually run all the post processing functions such as building the `pages` attribute, by running the `_get_by_type`, ` _add_previous_and_next_links` or ` _add_parent_links` yourself. 
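+
+A sketch of roughly what that manual work would look like inside a plugin (the
+plugin and section names are made up, and these are private MkDocs helpers, so
+take it as an illustration of the discouraged path, not as a supported API):
+
+```python
+from mkdocs.plugins import BasePlugin
+from mkdocs.structure.nav import (
+    Section,
+    _add_parent_links,
+    _add_previous_and_next_links,
+    _get_by_type,
+)
+from mkdocs.structure.pages import Page
+
+
+class NewsletterPlugin(BasePlugin):
+    def on_nav(self, nav, config, files):
+        # Append a hand built section (its Page children are omitted here).
+        nav.items.append(Section(title="Newsletters", children=[]))
+
+        # Redo the post processing that get_navigation already ran.
+        nav.pages = _get_by_type(nav.items, Page)
+        _add_previous_and_next_links(nav.pages)
+        _add_parent_links(nav.items)
+        return nav
+```
+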
Additionally, when building the site you'll get
-the `The following pages exist in the docs directory, but are not included in
-the "nav" configuration` error, because that check is done *before* all plugins
-change the navigation in the `on_nav` object.
+the
+`The following pages exist in the docs directory, but are not included in the "nav" configuration`
+error, because that check is done *before* all plugins change the navigation in
+the `on_nav` event.

The last approach is to build the files and tweak the navigation in the
`on_config` event. This approach has the next advantages:

-* You need less knowledge of how MkDocs works.
-* You don't need to create the `File` or `Files` objects.
-* You don't need to create the `Page`, `Section`, `SectionPage` objects.
-* More robust as you rely on existent MkDocs functionality.
+- You need less knowledge of how MkDocs works.
+- You don't need to create the `File` or `Files` objects.
+- You don't need to create the `Page`, `Section`, `SectionPage` objects.
+- More robust as you rely on existing MkDocs functionality.

# Testing

I haven't found any official documentation on how to test MkDocs plugins, in the
[issues](https://github.com/mkdocs/mkdocs/issues/1528) they suggest you look at
-how they test it in the [search
-plugin](https://github.com/mkdocs/mkdocs/blob/master/mkdocs/tests/search_tests.py).
+how they test it in the
+[search plugin](https://github.com/mkdocs/mkdocs/blob/master/mkdocs/tests/search_tests.py).
I've looked at other plugins such as
[mkdocs_blog](https://github.com/andyoakley/mkdocs-blog) and used the next way
to test [mkdocs-newsletter](https://github.com/lyz-code/mkdocs-newsletter).

@@ -472,54 +477,52 @@ program, that's why I feel the definition should be in
`src/mkdocs_newsletter/entrypoints/mkdocs_plugin.py`. As any entrypoint, the
best way to test them are in end-to-end tests.

-You need to have a [working test
-site](https://github.com/lyz-code/mkdocs-newsletter/tree/master/tests/assets/test_data)
+You need to have a
+[working test site](https://github.com/lyz-code/mkdocs-newsletter/tree/master/tests/assets/test_data)
in `tests/assets/test_data`, with it's `mkdocs.yml` file that loads your plugin
and some fake articles.

To prepare the test we can define the next [fixture](pytest.md#fixtures) that
prepares the building of the site:

-!!! note "File: `tests/conftest.py`"
+File: `tests/conftest.py`:

-    ```python
-    import os
-    import shutil
+```python
+import os
+import shutil
+from pathlib import Path

-    from mkdocs import config
-    from mkdocs.config.base import Config
-    from py._path.local import LocalPath
+import pytest
+from mkdocs import config
+from mkdocs.config.base import Config

-    @pytest.fixture(name="config")
-    def config_(tmpdir: LocalPath) -> Config:
-        """Load the mkdocs configuration."""
-        repo_path = tmpdir / "test_data"
-        shutil.copytree("tests/assets/test_data", repo_path)
-        mkdocs_config = config.load_config(
-            os.path.join(repo_path, "mkdocs.yml")
-        )
-        mkdocs_config["site_dir"] = os.path.join(repo_path, "site")
-        return mkdocs_config
-    ```
+
+@pytest.fixture(name="config")
+def config_(tmp_path: Path) -> Config:
+    """Load the mkdocs configuration."""
+    repo_path = tmp_path / "test_data"
+    shutil.copytree("tests/assets/test_data", repo_path)
+    mkdocs_config = config.load_config(os.path.join(repo_path, "mkdocs.yml"))
+    mkdocs_config["site_dir"] = os.path.join(repo_path, "site")
+    return mkdocs_config
+```

It does the next steps:

-* Copy the fake MkDocs site to a temporal directory
-* Prepare the MkDocs `Config` object to build the site.
+- Copy the fake MkDocs site to a temporal directory +- Prepare the MkDocs `Config` object to build the site. Now we can use it in the e2e tests: -!!! note "File: tests/e2e/test_plugin.py" +File: `tests/e2e/test_plugin.py`: - ```python - def test_plugin_builds_newsletters(full_repo: Repo, config: Config) -> None: - build.build(config) # act +```python +def test_plugin_builds_newsletters(full_repo: Repo, config: Config) -> None: + build.build(config) # act - newsletter_path = f"{full_repo.working_dir}/site/newsletter/2021_02/index.html" - with open(newsletter_path, "r") as newsletter_file: - newsletter = newsletter_file.read() - assert "February of 2021 - The Blue Book" in newsletter - ``` + newsletter_path = f"{full_repo.working_dir}/site/newsletter/2021_02/index.html" + with open(newsletter_path, "r") as newsletter_file: + newsletter = newsletter_file.read() + assert "February of 2021 - The Blue Book" in newsletter +``` That test is meant to ensure that our plugin works with the MkDocs ecosystem, so the assertions should be done against the created html files. @@ -534,22 +537,21 @@ You can see a full example Once they are closed: -* [Mkdocs Deprecation warning](https://github.com/mkdocs/mkdocs/issues/2794), - once it's solved remove the warning filter on mkdocs-newsletter - `pyproject.toml`. -* [Mkdocs-Material Deprecation warning](https://github.com/squidfunk/mkdocs-material/issues/3695), - once it's solved remove the warning filter on mkdocs-newsletter - `pyproject.toml`. +- [Mkdocs Deprecation warning](https://github.com/mkdocs/mkdocs/issues/2794), + once it's solved remove the warning filter on mkdocs-newsletter + `pyproject.toml`. +- [Mkdocs-Material Deprecation warning](https://github.com/squidfunk/mkdocs-material/issues/3695), + once it's solved remove the warning filter on mkdocs-newsletter + `pyproject.toml`. # References -* [Git](https://github.com/mkdocs/mkdocs/) -* [Homepage](https://www.mkdocs.org/). -* [Material theme configuration guide](https://squidfunk.github.io/mkdocs-material/getting-started/) +- [Git](https://github.com/mkdocs/mkdocs/) +- [Homepage](https://www.mkdocs.org/). +- [Material theme configuration guide](https://squidfunk.github.io/mkdocs-material/getting-started/) ## Plugin development -* [User guide](https://www.mkdocs.org/user-guide/plugins/) -* [List of events](https://www.mkdocs.org/user-guide/plugins/#events) -* [Plugin testing - example](https://github.com/andyoakley/mkdocs-blog/tree/master/tests) +- [User guide](https://www.mkdocs.org/user-guide/plugins/) +- [List of events](https://www.mkdocs.org/user-guide/plugins/#events) +- [Plugin testing example](https://github.com/andyoakley/mkdocs-blog/tree/master/tests) diff --git a/docs/linux_snippets.md b/docs/linux_snippets.md index a0fbade3dca..5dec628e66a 100644 --- a/docs/linux_snippets.md +++ b/docs/linux_snippets.md @@ -4,6 +4,55 @@ date: 20200826 author: Lyz --- +# [df and du showing different results](https://www.cyberciti.biz/tips/freebsd-why-command-df-and-du-reports-different-output.html) + +Sometimes on a linux machine you will notice that both `df` command (display +free disk space) and `du` command (display disk usage statistics) report +different output. Usually, `df` will output a bigger disk usage than `du`. + +The `du` command estimates file space usage, and the `df` command shows file +system disk space usage. 
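+
+To see the two views side by side you can measure the same directory both ways;
+a small Python sketch of the idea (`/var` is just an example path, pick a mount
+point you can read):
+
+```python
+import os
+import shutil
+
+path = "/var"  # assumption: the mount point you're inspecting
+
+# Filesystem level view, roughly what `df` reports for the mount holding `path`.
+usage = shutil.disk_usage(path)
+print(f"df-like used space: {usage.used / 1024**3:.2f} GiB")
+
+# File level view, roughly what `du` reports: sum the sizes of the files you can
+# reach by walking the tree (run it with enough permissions to see them all).
+du_bytes = 0
+for root, _dirs, files in os.walk(path):
+    for name in files:
+        try:
+            du_bytes += os.lstat(os.path.join(root, name)).st_size
+        except OSError:
+            pass  # files can vanish or be unreadable while walking
+print(f"du-like used space: {du_bytes / 1024**3:.2f} GiB")
+```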
+
+There are many reasons why this could be happening:
+
+## Disk mounted over data
+
+If you mount a disk on a directory that already holds data, then when you run
+`du` that data won't show, but `df` knows it's there.
+
+To troubleshoot this, unmount your disks one by one, and do an `ls` to see if
+there's any remaining data in the mount point.
+
+## Deleted files still in use
+
+When a file is deleted under Unix/Linux, the disk space it occupied is not
+always released immediately. The result of `du` no longer includes the size of
+the deleted file, but `df` keeps counting it because the space is not freed
+until the file is closed. Hence, after deleting the file, the results of `df`
+and `du` are different until the disk space is released.
+
+Open file descriptors are the main cause of this mismatch. For example, if a
+file called `/tmp/application.log` is kept open by a third-party application or
+by a user and the same file is deleted, `df` and `du` report different outputs.
+You can use the `lsof` command to verify this:
+
+```bash
+lsof | grep tmp
+```
+
+To fix it:
+
+- Use the `lsof` command as discussed above to find a deleted file opened by
+  other users and apps. See how to list all users in the system for more info.
+- Then, close those apps and log out of those Linux and Unix users.
+- As a sysadmin, restart or `kill` any process that did not release the deleted
+  file.
+- Flush the filesystem using the `sync` command that synchronizes cached writes
+  to persistent disk storage.
+- If everything else fails, try restarting the system using the `reboot` or
+  `shutdown` commands.
+
# Scan a physical page in Linux

Install `xsane` and run it.

@@ -230,7 +279,27 @@ open the `/etc/systemd/journald.conf` file and set the `SystemMaxUse` to the
amount you want (for example `1000M` for a gigabyte). Once edited restart the
service with `sudo systemctl restart systemd-journald`.

-## [Set up docker logs rotation](https://medium.com/free-code-camp/how-to-setup-log-rotation-for-a-docker-container-a508093912b2)
+## Clean up docker data
+
+To remove unused `docker` data you can run `docker system prune -a`. This will
+remove:
+
+- All stopped containers
+- All networks not used by at least one container
+- All images without at least one container associated to them
+- All build cache
+
+Sometimes that's not enough, and your `/var/lib/docker` directory still weighs
+more than it should. In those cases:
+
+- Stop the `docker` service.
+- Remove or move the data to another directory.
+- Start the `docker` service.
+
+In order not to lose your persisted data, you need to configure your containers
+to mount the data from a directory that's not within `/var/lib/docker`.
+
+### [Set up docker logs rotation](https://medium.com/free-code-camp/how-to-setup-log-rotation-for-a-docker-container-a508093912b2)

By default, the stdout and stderr of the container are written in a JSON file
located in `/var/lib/docker/containers/[container-id]/[container-id]-json.log`.
diff --git a/docs/maison.md b/docs/maison.md
index 195fb273c55..043c134677c 100644
--- a/docs/maison.md
+++ b/docs/maison.md
@@ -5,11 +5,14 @@ author: Lyz
---

[Maison](https://github.com/dbatten5/maison) is a Python library to read
-configuration settings from configuration files using
-[`pydantic`](pydantic.md) behind the scenes.
+configuration settings from configuration files using [`pydantic`](pydantic.md)
+behind the scenes.
It's useful to parse TOML config files. +Note: "If you want to use YAML for your config files use +[`goodconf`](goodconf.md) instead." + # Installation ```bash @@ -29,21 +32,17 @@ print(foo_option) ## [Read from file](https://dbatten5.github.io/maison/usage/#source-files) -By default, `maison` will look for a `pyproject.toml` file. If you prefer to look -elsewhere, provide a `source_files` list to `ProjectConfig` and `maison` will select -the first source file it finds from the list. +By default, `maison` will look for a `pyproject.toml` file. If you prefer to +look elsewhere, provide a `source_files` list to `ProjectConfig` and `maison` +will select the first source file it finds from the list. ```python from maison import ProjectConfig -config = ProjectConfig( - project_name="acme", - source_files=["acme.ini", "pyproject.toml"] -) +config = ProjectConfig(project_name="acme", source_files=["acme.ini", "pyproject.toml"]) ``` - # References -* [Git](https://github.com/dbatten5/maison) -* [Docs](https://dbatten5.github.io/maison/) +- [Git](https://github.com/dbatten5/maison) +- [Docs](https://dbatten5.github.io/maison/) diff --git a/docs/mdformat.md b/docs/mdformat.md index 87b88bb149a..c548fbcdcb6 100644 --- a/docs/mdformat.md +++ b/docs/mdformat.md @@ -87,6 +87,15 @@ There are two kinds of plugins: You can see some plugin examples [here](https://mdformat.readthedocs.io/en/stable/users/plugins.html). +# Issues + +- It doesn't yet + [support admonitions](https://github.com/executablebooks/mdformat/issues/309) +- You can't + [ignore some files](https://github.com/executablebooks/mdformat/issues/359), + nor + [some part of the file](https://github.com/executablebooks/mdformat/issues/53) + # References - [Docs](https://mdformat.readthedocs.io/en/stable/) diff --git a/docs/openproject.md b/docs/openproject.md index de169ad1330..5b20dba282d 100644 --- a/docs/openproject.md +++ b/docs/openproject.md @@ -54,6 +54,9 @@ The things I don't like are: [Proof of Concept environment yourself](#proof-of-concept) if you already know `docker-compose`. - [The status column is not showing the status color](https://community.openproject.org/projects/openproject/work_packages/44944). +- You can't hide an element from a report for a day. For example if there is a + blocked task that you can't work on for today, you can't hide it till + tomorrow. - Even thought the [Community (free) version has many features](https://www.openproject.org/pricing/#features) the next aren't: @@ -142,30 +145,14 @@ I'm going to follow the `docker-compose`: - Tweak the `docker-compose.yaml` [file through the `docker-compose.override.yml`](https://docs.docker.com/compose/extends/) - file for example if you want to override how the volumes are defined: - - ```yaml - volumes: - pgdata: - driver: local - driver_opts: - type: none - o: bind - device: /data/openproject-postgres - opdata: - driver: local - driver_opts: - type: none - o: bind - device: /data/openproject - - ``` - Add the required environmental variables through a `.env` file ``` OPENPROJECT_HOST__NAME=openproject.example.com OPENPROJECT_SECRET_KEY_BASE=secret + PGDATA=/path/to/postgres/data + OPDATA=/path/to/openproject/data ``` Where `secret` is the value of