From 7869c3d227ef0484d6f1547d25fc535c75a6d9b6 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 07:30:44 +1000 Subject: [PATCH 01/60] Bump version --- .pre-commit-config.yaml | 4 +- RELEASES.md | 15 +++++++ nautilus_core/Cargo.lock | 39 ++++++++++--------- nautilus_core/Cargo.toml | 4 +- poetry.lock | 84 ++++++++++++++++++++-------------------- pyproject.toml | 6 +-- version.json | 2 +- 7 files changed, 85 insertions(+), 69 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79734d8ff7ff..2a3b26d9b087 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -74,7 +74,7 @@ repos: types: [python] - repo: https://github.com/psf/black - rev: 24.4.2 + rev: 24.8.0 hooks: - id: black types_or: [python, pyi] @@ -83,7 +83,7 @@ repos: exclude: "docs/_pygments/monokai.py" - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.5 + rev: v0.5.6 hooks: - id: ruff args: ["--fix"] diff --git a/RELEASES.md b/RELEASES.md index 3c8b2867d37c..3bb7214dd075 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,18 @@ +# NautilusTrader 1.198.0 Beta + +Released on TBD (UTC). + +### Enhancements +None + +### Breaking Changes +None + +### Fixes +None + +--- + # NautilusTrader 1.197.0 Beta Released on 2nd August 2024 (UTC). 
diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index b439419a56aa..291685fd70f2 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -2564,7 +2564,7 @@ dependencies = [ [[package]] name = "nautilus-adapters" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "chrono", @@ -2594,7 +2594,7 @@ dependencies = [ [[package]] name = "nautilus-backtest" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "cbindgen", @@ -2614,7 +2614,7 @@ dependencies = [ [[package]] name = "nautilus-cli" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "clap 4.5.13", @@ -2631,7 +2631,7 @@ dependencies = [ [[package]] name = "nautilus-common" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "bytes", @@ -2660,7 +2660,7 @@ dependencies = [ [[package]] name = "nautilus-core" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "bytes", @@ -2681,7 +2681,7 @@ dependencies = [ [[package]] name = "nautilus-data" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "chrono", @@ -2707,7 +2707,7 @@ dependencies = [ [[package]] name = "nautilus-execution" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "criterion", @@ -2731,7 +2731,7 @@ dependencies = [ [[package]] name = "nautilus-indicators" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "log", @@ -2744,7 +2744,7 @@ dependencies = [ [[package]] name = "nautilus-infrastructure" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "async-stream", @@ -2772,7 +2772,7 @@ dependencies = [ [[package]] name = "nautilus-model" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "cbindgen", @@ -2800,7 +2800,7 @@ dependencies = [ [[package]] name = "nautilus-network" -version = "0.27.0" +version = "0.28.0" dependencies = [ "anyhow", "axum", @@ -2825,7 +2825,7 @@ dependencies = [ [[package]] name = "nautilus-persistence" -version = "0.27.0" +version = "0.28.0" 
dependencies = [ "anyhow", "binary-heap-plus", @@ -2849,7 +2849,7 @@ dependencies = [ [[package]] name = "nautilus-pyo3" -version = "0.27.0" +version = "0.28.0" dependencies = [ "nautilus-adapters", "nautilus-common", @@ -3739,9 +3739,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.5" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", @@ -4767,12 +4767,13 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" dependencies = [ "cfg-if", "fastrand", + "once_cell", "rustix", "windows-sys 0.52.0", ] @@ -5774,9 +5775,9 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.2.0" +version = "7.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa556e971e7b568dc775c136fc9de8c779b1c2fc3a63defaafadffdbd3181afa" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" dependencies = [ "zstd-sys", ] diff --git a/nautilus_core/Cargo.toml b/nautilus_core/Cargo.toml index f777a1549fdf..47de484a7b43 100644 --- a/nautilus_core/Cargo.toml +++ b/nautilus_core/Cargo.toml @@ -19,7 +19,7 @@ members = [ [workspace.package] rust-version = "1.80.0" -version = "0.27.0" +version = "0.28.0" edition = "2021" authors = ["Nautech Systems "] description = "A high-performance algorithmic trading platform and event-driven backtester" @@ -59,7 +59,7 @@ float-cmp = "0.9.0" iai = "0.1.1" pretty_assertions = "1.4.0" rstest = "0.21.0" -tempfile = "3.10.1" +tempfile = "3.11.0" # 
build-dependencies cbindgen = "0.26.0" diff --git a/poetry.lock b/poetry.lock index e23c667a35c8..598d0084bf3b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -193,33 +193,33 @@ msgspec = ">=0.18.5" [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = 
"black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -1802,29 +1802,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.5" +version = "0.5.6" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = "sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"}, - {file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"}, - {file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"}, - {file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"}, - {file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"}, - {file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"}, - {file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"}, + {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, + {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, + {file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, + {file = 
"ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, + {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, + {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, + {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, + {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index c262c2fae247..c8239659162b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautilus_trader" -version = "1.197.0" +version = "1.198.0" description = "A high-performance algorithmic trading platform and event-driven backtester" authors = ["Nautech Systems "] license = "LGPL-3.0-or-later" @@ -79,12 +79,12 @@ ib = ["nautilus_ibapi", "async-timeout", "defusedxml"] optional = true [tool.poetry.group.dev.dependencies] -black = "^24.4.2" +black = "^24.8.0" docformatter = "^1.7.5" mypy = "^1.11.1" pandas-stubs = "^2.2.2" 
pre-commit = "^3.8.0" -ruff = "^0.5.5" +ruff = "^0.5.6" types-pytz = "^2024.1" types-requests = "^2.32" types-toml = "^0.10.2" diff --git a/version.json b/version.json index 1c3538eeb053..df81630c9d01 100644 --- a/version.json +++ b/version.json @@ -1,6 +1,6 @@ { "schemaVersion": 1, "label": "", - "message": "v1.197.0", + "message": "v1.198.0", "color": "orange" } From 1075e795300f3c4de94a92af72b15caf2e840b10 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 08:01:17 +1000 Subject: [PATCH 02/60] DataEngine takes reference to MessageBus --- nautilus_core/common/src/messages/data.rs | 6 +++--- nautilus_core/data/src/engine/mod.rs | 17 ++++++++++++----- nautilus_core/data/src/engine/runner.rs | 19 +++++++++++++++++-- 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/nautilus_core/common/src/messages/data.rs b/nautilus_core/common/src/messages/data.rs index 07fedd893dd5..1efdc0e64551 100644 --- a/nautilus_core/common/src/messages/data.rs +++ b/nautilus_core/common/src/messages/data.rs @@ -37,8 +37,8 @@ pub struct DataResponse { pub client_id: ClientId, pub venue: Venue, pub data_type: DataType, - pub ts_init: UnixNanos, pub data: Payload, + pub ts_init: UnixNanos, } impl DataResponse { @@ -77,13 +77,13 @@ pub struct SubscriptionCommand { } pub enum DataEngineRequest { - DataRequest(DataRequest), + Request(DataRequest), SubscriptionCommand(SubscriptionCommand), } // TODO: Refine this to reduce disparity between enum sizes #[allow(clippy::large_enum_variant)] pub enum DataClientResponse { - DataResponse(DataResponse), + Response(DataResponse), Data(Data), } diff --git a/nautilus_core/data/src/engine/mod.rs b/nautilus_core/data/src/engine/mod.rs index 7063ffe6fb84..0ed786995feb 100644 --- a/nautilus_core/data/src/engine/mod.rs +++ b/nautilus_core/data/src/engine/mod.rs @@ -56,18 +56,20 @@ use nautilus_model::{ }; pub struct DataEngineConfig { - pub debug: bool, pub time_bars_build_with_no_updates: bool, pub time_bars_timestamp_on_close: 
bool, pub time_bars_interval_type: String, // Make this an enum `BarIntervalType` pub validate_data_sequence: bool, pub buffer_deltas: bool, + pub external_clients: Vec, + pub debug: bool, } pub struct DataEngine { state: PhantomData, clock: Box, cache: Rc>, + msgbus: Rc>, default_client: Option, // order_book_intervals: HashMap<(InstrumentId, usize), Vec>, // TODO // bar_aggregators: // TODO @@ -75,7 +77,6 @@ pub struct DataEngine { synthetic_trade_feeds: HashMap>, buffered_deltas_map: HashMap>, config: DataEngineConfig, - msgbus: MessageBus, } impl DataEngine { @@ -83,8 +84,8 @@ impl DataEngine { pub fn new( clock: Box, cache: Rc>, + msgbus: Rc>, config: DataEngineConfig, - msgbus: MessageBus, ) -> Self { Self { state: PhantomData::, @@ -123,6 +124,7 @@ impl DataEngine { #[must_use] pub fn check_connected(&self) -> bool { self.msgbus + .borrow() .clients .values() .all(|client| client.is_connected()) @@ -131,6 +133,7 @@ impl DataEngine { #[must_use] pub fn check_disconnected(&self) -> bool { self.msgbus + .borrow() .clients .values() .all(|client| !client.is_connected()) @@ -138,7 +141,7 @@ impl DataEngine { #[must_use] pub fn registed_clients(&self) -> Vec { - self.msgbus.clients.keys().copied().collect() + self.msgbus.borrow().clients.keys().copied().collect() } // -- SUBSCRIPTIONS --------------------------------------------------------------------------- @@ -149,7 +152,7 @@ impl DataEngine { T: Clone, { let mut subs = Vec::new(); - for client in self.msgbus.clients.values() { + for client in self.msgbus.borrow().clients.values() { subs.extend(get_subs(client).iter().cloned()); } subs @@ -226,6 +229,7 @@ impl DataEngine { #[must_use] pub fn start(self) -> DataEngine { self.msgbus + .borrow() .clients .values() .for_each(|client| client.start()); @@ -235,6 +239,7 @@ impl DataEngine { #[must_use] pub fn stop(self) -> DataEngine { self.msgbus + .borrow() .clients .values() .for_each(|client| client.stop()); @@ -244,6 +249,7 @@ impl DataEngine { #[must_use] pub fn 
reset(self) -> Self { self.msgbus + .borrow() .clients .values() .for_each(|client| client.reset()); @@ -253,6 +259,7 @@ impl DataEngine { #[must_use] pub fn dispose(mut self) -> DataEngine { self.msgbus + .borrow() .clients .values() .for_each(|client| client.dispose()); diff --git a/nautilus_core/data/src/engine/runner.rs b/nautilus_core/data/src/engine/runner.rs index 278b8c04b0a8..76bbcf9c7550 100644 --- a/nautilus_core/data/src/engine/runner.rs +++ b/nautilus_core/data/src/engine/runner.rs @@ -1,3 +1,18 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ------------------------------------------------------------------------------------------------- + use std::{cell::RefCell, collections::VecDeque, rc::Rc}; use nautilus_common::{ @@ -36,7 +51,7 @@ impl Runner for LiveRunner { fn run(&mut self, engine: &DataEngine) { while let Some(resp) = self.resp_rx.blocking_recv() { match resp { - DataClientResponse::DataResponse(data_resp) => engine.response(data_resp), + DataClientResponse::Response(data_resp) => engine.response(data_resp), DataClientResponse::Data(data) => engine.process(data), } } @@ -65,7 +80,7 @@ impl Runner for BacktestRunner { fn run(&mut self, engine: &DataEngine) { while let Some(resp) = self.queue.as_ref().borrow_mut().pop_front() { match resp { - DataClientResponse::DataResponse(data_resp) => engine.response(data_resp), + DataClientResponse::Response(data_resp) => engine.response(data_resp), DataClientResponse::Data(data) => engine.process(data), } } From 69d382c64cfebf5d422c1aa7dc0418202179f98f Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 08:03:43 +1000 Subject: [PATCH 03/60] Refine variable naming --- nautilus_core/data/src/engine/runner.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautilus_core/data/src/engine/runner.rs b/nautilus_core/data/src/engine/runner.rs index 76bbcf9c7550..83dab85e347c 100644 --- a/nautilus_core/data/src/engine/runner.rs +++ b/nautilus_core/data/src/engine/runner.rs @@ -51,7 +51,7 @@ impl Runner for LiveRunner { fn run(&mut self, engine: &DataEngine) { while let Some(resp) = self.resp_rx.blocking_recv() { match resp { - DataClientResponse::Response(data_resp) => engine.response(data_resp), + DataClientResponse::Response(resp) => engine.response(resp), DataClientResponse::Data(data) => engine.process(data), } } @@ -80,7 +80,7 @@ impl Runner for BacktestRunner { fn run(&mut self, engine: &DataEngine) { while let Some(resp) = self.queue.as_ref().borrow_mut().pop_front() { match resp { - 
DataClientResponse::Response(data_resp) => engine.response(data_resp), + DataClientResponse::Response(resp) => engine.response(resp), DataClientResponse::Data(data) => engine.process(data), } } From 392979194f5388dacbe24c36a3607a2f23bc26a5 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 10:01:03 +1000 Subject: [PATCH 04/60] Continue DataEngine implementation in Rust --- nautilus_core/data/src/aggregation.rs | 6 +- nautilus_core/data/src/engine/mod.rs | 163 +++++++++++++++++++------- 2 files changed, 124 insertions(+), 45 deletions(-) diff --git a/nautilus_core/data/src/aggregation.rs b/nautilus_core/data/src/aggregation.rs index 361c48b82f31..1fdff14087cf 100644 --- a/nautilus_core/data/src/aggregation.rs +++ b/nautilus_core/data/src/aggregation.rs @@ -36,9 +36,11 @@ use nautilus_model::{ }; pub trait BarAggregator { + /// The [`BarType`] to be aggregated. fn bar_type(&self) -> BarType; + /// Updates theaggregator with the given price and size. fn update(&mut self, price: Price, size: Quantity, ts_event: UnixNanos); - /// Update the aggregator with the given quote. + /// Updates the aggregator with the given quote. fn handle_quote_tick(&mut self, quote: QuoteTick) { self.update( quote.extract_price(self.bar_type().spec.price_type), @@ -46,7 +48,7 @@ pub trait BarAggregator { quote.ts_event, ); } - /// Update the aggregator with the given trade. + /// Updates the aggregator with the given trade. 
fn handle_trade_tick(&mut self, trade: TradeTick) { self.update(trade.price, trade.size, trade.ts_event); } diff --git a/nautilus_core/data/src/engine/mod.rs b/nautilus_core/data/src/engine/mod.rs index 0ed786995feb..a02431aea6ff 100644 --- a/nautilus_core/data/src/engine/mod.rs +++ b/nautilus_core/data/src/engine/mod.rs @@ -22,6 +22,7 @@ pub mod runner; use std::{ + any::Any, cell::RefCell, collections::{HashMap, HashSet}, marker::PhantomData, @@ -55,6 +56,8 @@ use nautilus_model::{ instruments::{any::InstrumentAny, synthetic::SyntheticInstrument}, }; +use crate::aggregation::BarAggregator; + pub struct DataEngineConfig { pub time_bars_build_with_no_updates: bool, pub time_bars_timestamp_on_close: bool, @@ -72,7 +75,7 @@ pub struct DataEngine { msgbus: Rc>, default_client: Option, // order_book_intervals: HashMap<(InstrumentId, usize), Vec>, // TODO - // bar_aggregators: // TODO + bar_aggregators: Vec>, // TODO: dyn for now synthetic_quote_feeds: HashMap>, synthetic_trade_feeds: HashMap>, buffered_deltas_map: HashMap>, @@ -92,6 +95,7 @@ impl DataEngine { clock, cache, default_client: None, + bar_aggregators: Vec::new(), synthetic_quote_feeds: HashMap::new(), synthetic_trade_feeds: HashMap::new(), buffered_deltas_map: HashMap::new(), @@ -108,6 +112,7 @@ impl DataEngine { clock: self.clock, cache: self.cache, default_client: self.default_client, + bar_aggregators: self.bar_aggregators, synthetic_quote_feeds: self.synthetic_quote_feeds, synthetic_trade_feeds: self.synthetic_trade_feeds, buffered_deltas_map: self.buffered_deltas_map, @@ -300,34 +305,34 @@ impl DataEngine { } } - pub fn response(&self, response: DataResponse) { + pub fn response(&self, resp: DataResponse) { log::debug!("{}", format!("{RECV}{RES} response")); // TODO: Display for response - match response.data_type.type_name() { + match resp.data_type.type_name() { stringify!(InstrumentAny) => { - let instruments = Arc::downcast::>(response.data.clone()) + let instruments = 
Arc::downcast::>(resp.data.clone()) .expect("Invalid response data"); self.handle_instruments(instruments); } stringify!(QuoteTick) => { - let quotes = Arc::downcast::>(response.data.clone()) + let quotes = Arc::downcast::>(resp.data.clone()) .expect("Invalid response data"); self.handle_quotes(quotes); } stringify!(TradeTick) => { - let trades = Arc::downcast::>(response.data.clone()) + let trades = Arc::downcast::>(resp.data.clone()) .expect("Invalid response data"); self.handle_trades(trades); } stringify!(Bar) => { - let bars = Arc::downcast::>(response.data.clone()) - .expect("Invalid response data"); + let bars = + Arc::downcast::>(resp.data.clone()).expect("Invalid response data"); self.handle_bars(bars); } _ => {} // Nothing else to handle } - // self.msgbus.response() // TODO: Send response to registered handler + self.msgbus.as_ref().borrow().send_response(resp) } // -- DATA HANDLERS --------------------------------------------------------------------------- @@ -343,39 +348,42 @@ impl DataEngine { log::error!("Error on cache insert: {e}"); } - let instrument_id = instrument.id(); - let topic = format!( - "data.instrument.{}.{}", - instrument_id.venue, instrument_id.symbol - ); + let topic = get_instrument_publish_topic(&instrument); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, &instrument as &dyn Any); // TODO: Optimize } fn handle_delta(&self, delta: OrderBookDelta) { // TODO: Manage buffered deltas // TODO: Manage book - let topic = format!( - "data.book.deltas.{}.{}", - delta.instrument_id.venue, delta.instrument_id.symbol - ); + let topic = get_delta_publish_topic(&delta); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, &delta as &dyn Any); // TODO: Optimize } fn handle_deltas(&self, deltas: OrderBookDeltas) { // TODO: Manage book - let topic = format!( - "data.book.snapshots.{}.{}", // TODO: Revise snapshots topic component - deltas.instrument_id.venue, deltas.instrument_id.symbol - ); + let topic = 
get_deltas_publish_topic(&deltas); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, &deltas as &dyn Any); // TODO: Optimize } fn handle_depth10(&self, depth: OrderBookDepth10) { // TODO: Manage book - let topic = format!( - "data.book.depth.{}.{}", - depth.instrument_id.venue, depth.instrument_id.symbol - ); + let topic = get_depth_publish_topic(&depth); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, &depth as &dyn Any); // TODO: Optimize } fn handle_quote(&self, quote: QuoteTick) { @@ -385,10 +393,11 @@ impl DataEngine { // TODO: Handle synthetics - let topic = format!( - "data.quotes.{}.{}", - quote.instrument_id.venue, quote.instrument_id.symbol - ); + let topic = get_quote_publish_topic("e); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, "e as &dyn Any); // TODO: Optimize } fn handle_trade(&self, trade: TradeTick) { @@ -398,33 +407,53 @@ impl DataEngine { // TODO: Handle synthetics - let topic = format!( - "data.trades.{}.{}", - trade.instrument_id.venue, trade.instrument_id.symbol - ); + let topic = get_trade_publish_topic(&trade); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, &trade as &dyn Any); // TODO: Optimize } fn handle_bar(&self, bar: Bar) { + // TODO: Handle additional bar logic + if self.config.validate_data_sequence { + if let Some(last_bar) = self.cache.as_ref().borrow().bar(&bar.bar_type) { + if bar.ts_event < last_bar.ts_event { + log::warn!( + "Bar {bar} was prior to last bar `ts_event` {}", + last_bar.ts_event + ); + return; // `bar` is out of sequence + } + if bar.ts_init < last_bar.ts_init { + log::warn!( + "Bar {bar} was prior to last bar `ts_init` {}", + last_bar.ts_init + ); + return; // `bar` is out of sequence + } + // TODO: Implement `bar.is_revision` logic + } + } + if let Err(e) = self.cache.as_ref().borrow_mut().add_bar(bar) { log::error!("Error on cache insert: {e}"); } - // TODO: Handle additional bar logic - - let topic = format!("data.bars.{}", bar.bar_type); + let topic = 
get_bar_publish_topic(&bar); + self.msgbus + .as_ref() + .borrow() + .publish(&topic, &bar as &dyn Any); // TODO: Optimize } // -- RESPONSE HANDLERS ----------------------------------------------------------------------- fn handle_instruments(&self, instruments: Arc>) { // TODO improve by adding bulk update methods to cache and database + let mut cache = self.cache.as_ref().borrow_mut(); for instrument in instruments.iter() { - if let Err(e) = self - .cache - .as_ref() - .borrow_mut() - .add_instrument(instrument.clone()) - { + if let Err(e) = cache.add_instrument(instrument.clone()) { log::error!("Error on cache insert: {e}"); } } @@ -487,3 +516,51 @@ impl DataEngine { self.transition() } } + +// TODO: Potentially move these +pub fn get_instrument_publish_topic(instrument: &InstrumentAny) -> String { + let instrument_id = instrument.id(); + format!( + "data.instrument.{}.{}", + instrument_id.venue, instrument_id.symbol + ) +} + +pub fn get_delta_publish_topic(delta: &OrderBookDelta) -> String { + format!( + "data.book.delta.{}.{}", + delta.instrument_id.venue, delta.instrument_id.symbol + ) +} + +pub fn get_deltas_publish_topic(delta: &OrderBookDeltas) -> String { + format!( + "data.book.snapshots.{}.{}", + delta.instrument_id.venue, delta.instrument_id.symbol + ) +} + +pub fn get_depth_publish_topic(depth: &OrderBookDepth10) -> String { + format!( + "data.book.depth.{}.{}", + depth.instrument_id.venue, depth.instrument_id.symbol + ) +} + +pub fn get_quote_publish_topic(quote: &QuoteTick) -> String { + format!( + "data.quotes.{}.{}", + quote.instrument_id.venue, quote.instrument_id.symbol + ) +} + +pub fn get_trade_publish_topic(trade: &TradeTick) -> String { + format!( + "data.trades.{}.{}", + trade.instrument_id.venue, trade.instrument_id.symbol + ) +} + +pub fn get_bar_publish_topic(bar: &Bar) -> String { + format!("data.bars.{}", bar.bar_type) +} From 9937227790819a797905aa34d090c41d473103e6 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 
10:54:09 +1000 Subject: [PATCH 05/60] Fully qualify logging macros in Rust --- nautilus_core/Cargo.lock | 15 +- nautilus_core/adapters/src/databento/live.rs | 29 ++- .../src/databento/python/historical.rs | 3 +- .../adapters/src/databento/python/live.rs | 21 +- nautilus_core/backtest/src/matching_engine.rs | 5 +- nautilus_core/cli/src/bin/cli.rs | 4 +- nautilus_core/common/src/cache/mod.rs | 231 ++++++++++-------- nautilus_core/common/src/clock.rs | 5 +- nautilus_core/common/src/logging/logger.rs | 8 +- nautilus_core/common/src/logging/mod.rs | 5 +- nautilus_core/common/src/logging/writer.rs | 11 +- nautilus_core/common/src/msgbus/mod.rs | 3 +- nautilus_core/common/src/timer.rs | 9 +- nautilus_core/data/src/engine/mod.rs | 1 - nautilus_core/execution/src/engine.rs | 3 +- .../infrastructure/src/python/redis/msgbus.rs | 3 +- .../infrastructure/src/redis/cache.rs | 29 ++- nautilus_core/infrastructure/src/redis/mod.rs | 7 +- .../infrastructure/src/redis/msgbus.rs | 31 ++- .../infrastructure/src/sql/cache_database.rs | 95 +++---- nautilus_core/infrastructure/src/sql/pg.rs | 78 +++--- nautilus_core/network/src/http.rs | 5 +- nautilus_core/network/src/socket.rs | 70 +++--- nautilus_core/network/src/websocket.rs | 84 +++---- poetry.lock | 2 +- 25 files changed, 402 insertions(+), 355 deletions(-) diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 291685fd70f2..5c4331560683 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -5503,11 +5503,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5553,6 +5553,15 @@ dependencies = [ 
"windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.48.5" diff --git a/nautilus_core/adapters/src/databento/live.rs b/nautilus_core/adapters/src/databento/live.rs index cde1673b063c..ad8d99637050 100644 --- a/nautilus_core/adapters/src/databento/live.rs +++ b/nautilus_core/adapters/src/databento/live.rs @@ -40,7 +40,6 @@ use tokio::{ sync::mpsc::{self, error::TryRecvError}, time::{timeout, Duration}, }; -use tracing::{debug, error, info, trace}; use super::{ decode::{decode_imbalance_msg, decode_statistics_msg, decode_status_msg}, @@ -106,7 +105,7 @@ impl DatabentoFeedHandler { /// Run the feed handler to begin listening for commands and processing messages. pub async fn run(&mut self) -> anyhow::Result<()> { - debug!("Running feed handler"); + tracing::debug!("Running feed handler"); let clock = get_atomic_clock_realtime(); let mut symbol_map = PitSymbolMap::new(); let mut instrument_id_map: HashMap = HashMap::new(); @@ -124,7 +123,7 @@ impl DatabentoFeedHandler { .build(), ) .await?; - info!("Connected"); + tracing::info!("Connected"); let mut client = if let Ok(client) = result { client @@ -142,13 +141,13 @@ impl DatabentoFeedHandler { loop { if self.msg_tx.is_closed() { - debug!("Message channel was closed: stopping"); + tracing::debug!("Message channel was closed: stopping"); break; }; match self.cmd_rx.try_recv() { Ok(cmd) => { - debug!("Received command: {:?}", cmd); + tracing::debug!("Received command: {:?}", cmd); match cmd { LiveCommand::Subscribe(sub) => { if !self.replay & sub.start.is_some() { @@ -163,13 +162,13 @@ impl DatabentoFeedHandler { }; client.start().await.map_err(to_pyruntime_err)?; running = true; - debug!("Started"); + tracing::debug!("Started"); } LiveCommand::Close 
=> { self.msg_tx.send(LiveMessage::Close).await?; if running { client.close().await.map_err(to_pyruntime_err)?; - debug!("Closed inner client"); + tracing::debug!("Closed inner client"); } break; } @@ -177,7 +176,7 @@ impl DatabentoFeedHandler { } Err(TryRecvError::Empty) => {} // No command yet Err(TryRecvError::Disconnected) => { - debug!("Disconnected"); + tracing::debug!("Disconnected"); break; } } @@ -256,7 +255,7 @@ impl DatabentoFeedHandler { ) { Ok(decoded) => decoded, Err(e) => { - error!("Error decoding record: {e}"); + tracing::error!("Error decoding record: {e}"); continue; } }; @@ -269,7 +268,7 @@ impl DatabentoFeedHandler { // TODO: Temporary for debugging deltas_count += 1; - trace!( + tracing::trace!( "Buffering delta: {} {} {:?} flags={}", deltas_count, delta.ts_event, @@ -314,26 +313,26 @@ impl DatabentoFeedHandler { } self.cmd_rx.close(); - debug!("Closed command receiver"); + tracing::debug!("Closed command receiver"); Ok(()) } async fn send_msg(&mut self, msg: LiveMessage) { - trace!("Sending {msg:?}"); + tracing::trace!("Sending {msg:?}"); match self.msg_tx.send(msg).await { Ok(()) => {} - Err(e) => error!("Error sending message: {e}"), + Err(e) => tracing::error!("Error sending message: {e}"), } } } fn handle_error_msg(msg: &dbn::ErrorMsg) { - error!("{msg:?}"); + tracing::error!("{msg:?}"); } fn handle_system_msg(msg: &dbn::SystemMsg) { - info!("{msg:?}"); + tracing::info!("{msg:?}"); } fn handle_symbol_mapping_msg( diff --git a/nautilus_core/adapters/src/databento/python/historical.rs b/nautilus_core/adapters/src/databento/python/historical.rs index e4c357616929..9dce306d19ef 100644 --- a/nautilus_core/adapters/src/databento/python/historical.rs +++ b/nautilus_core/adapters/src/databento/python/historical.rs @@ -37,7 +37,6 @@ use pyo3::{ types::{PyDict, PyList}, }; use tokio::sync::Mutex; -use tracing::error; use crate::databento::{ common::get_date_time_range, @@ -167,7 +166,7 @@ impl DatabentoHistoricalClient { let result = 
decode_instrument_def_msg(msg, instrument_id, ts_init); match result { Ok(instrument) => instruments.push(instrument), - Err(e) => error!("{e:?}"), + Err(e) => tracing::error!("{e:?}"), }; } diff --git a/nautilus_core/adapters/src/databento/python/live.rs b/nautilus_core/adapters/src/databento/python/live.rs index c194e5c87b0d..4b9d97f4dfa8 100644 --- a/nautilus_core/adapters/src/databento/python/live.rs +++ b/nautilus_core/adapters/src/databento/python/live.rs @@ -25,7 +25,6 @@ use nautilus_model::{ use pyo3::prelude::*; use time::OffsetDateTime; use tokio::sync::mpsc; -use tracing::{debug, error, trace}; use crate::databento::{ live::{DatabentoFeedHandler, LiveCommand, LiveMessage}, @@ -63,10 +62,10 @@ impl DatabentoLiveClient { callback: PyObject, callback_pyo3: PyObject, ) -> PyResult<()> { - debug!("Processing messages..."); + tracing::debug!("Processing messages..."); // Continue to process messages until channel is hung up while let Some(msg) = msg_rx.recv().await { - trace!("Received message: {:?}", msg); + tracing::trace!("Received message: {:?}", msg); let result = match msg { LiveMessage::Data(data) => Python::with_gil(|py| { let py_obj = data_to_pycapsule(py, data); @@ -103,7 +102,7 @@ impl DatabentoLiveClient { } msg_rx.close(); - debug!("Closed message receiver"); + tracing::debug!("Closed message receiver"); Ok(()) } @@ -115,7 +114,7 @@ impl DatabentoLiveClient { fn call_python(py: Python, callback: &PyObject, py_obj: PyObject) -> PyResult<()> { callback.call1(py, (py_obj,)).map_err(|e| { - error!("Error calling Python: {e}"); + tracing::error!("Error calling Python: {e}"); e })?; Ok(()) @@ -191,7 +190,7 @@ impl DatabentoLiveClient { return Err(to_pyruntime_err("Client already running")); }; - debug!("Starting client"); + tracing::debug!("Starting client"); self.is_running = true; @@ -219,13 +218,13 @@ impl DatabentoLiveClient { ); match proc_handle { - Ok(()) => debug!("Message processor completed"), - Err(e) => error!("Message processor error: 
{e}"), + Ok(()) => tracing::debug!("Message processor completed"), + Err(e) => tracing::error!("Message processor error: {e}"), } match feed_handle { - Ok(()) => debug!("Feed handler completed"), - Err(e) => error!("Feed handler error: {e}"), + Ok(()) => tracing::debug!("Feed handler completed"), + Err(e) => tracing::error!("Feed handler error: {e}"), } Ok(()) @@ -241,7 +240,7 @@ impl DatabentoLiveClient { return Err(to_pyruntime_err("Client already closed")); }; - debug!("Closing client"); + tracing::debug!("Closing client"); if !self.is_closed() { self.send_command(LiveCommand::Close)?; diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index 4a8300f3b011..e2955072a039 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -21,7 +21,6 @@ use std::{any::Any, collections::HashMap, rc::Rc}; -use log::{debug, info}; use nautilus_common::{cache::Cache, msgbus::MessageBus}; use nautilus_core::{nanos::UnixNanos, time::AtomicTime, uuid::UUID4}; use nautilus_execution::matching_core::OrderMatchingCore; @@ -160,7 +159,7 @@ impl OrderMatchingEngine { self.order_count = 0; self.execution_count = 0; - info!("Reset {}", self.instrument.id()); + log::info!("Reset {}", self.instrument.id()); } #[must_use] @@ -197,7 +196,7 @@ impl OrderMatchingEngine { /// Process the venues market for the given order book delta. 
pub fn process_order_book_delta(&mut self, delta: &OrderBookDelta) { - debug!("Processing {delta}"); + log::debug!("Processing {delta}"); self.book.apply_delta(delta); } diff --git a/nautilus_core/cli/src/bin/cli.rs b/nautilus_core/cli/src/bin/cli.rs index 66599d45550f..f83d1aa6d723 100644 --- a/nautilus_core/cli/src/bin/cli.rs +++ b/nautilus_core/cli/src/bin/cli.rs @@ -14,7 +14,7 @@ // ------------------------------------------------------------------------------------------------- use clap::Parser; -use log::{error, LevelFilter}; +use log::LevelFilter; use nautilus_cli::opt::NautilusCli; #[tokio::main] @@ -25,6 +25,6 @@ async fn main() { .init() .unwrap(); if let Err(e) = nautilus_cli::run(NautilusCli::parse()).await { - error!("Error executing Nautilus CLI: {}", e); + log::error!("Error executing Nautilus CLI: {}", e); } } diff --git a/nautilus_core/common/src/cache/mod.rs b/nautilus_core/common/src/cache/mod.rs index fccdc0802899..d1e36c9378ab 100644 --- a/nautilus_core/common/src/cache/mod.rs +++ b/nautilus_core/common/src/cache/mod.rs @@ -28,7 +28,6 @@ use std::{ use bytes::Bytes; use database::CacheDatabaseAdapter; -use log::{debug, error, info, warn}; use nautilus_core::correctness::{ check_key_not_in_map, check_predicate_false, check_slice_not_empty, check_valid_string, }; @@ -293,7 +292,7 @@ impl Cache { None => HashMap::new(), }; - info!( + log::info!( "Cached {} general object(s) from database", self.general.len() ); @@ -307,7 +306,7 @@ impl Cache { None => HashMap::new(), }; - info!("Cached {} currencies from database", self.general.len()); + log::info!("Cached {} currencies from database", self.general.len()); Ok(()) } @@ -318,7 +317,7 @@ impl Cache { None => HashMap::new(), }; - info!("Cached {} instruments from database", self.general.len()); + log::info!("Cached {} instruments from database", self.general.len()); Ok(()) } @@ -330,7 +329,7 @@ impl Cache { None => HashMap::new(), }; - info!( + log::info!( "Cached {} synthetic instruments from 
database", self.general.len() ); @@ -344,7 +343,7 @@ impl Cache { None => HashMap::new(), }; - info!( + log::info!( "Cached {} synthetic instruments from database", self.general.len() ); @@ -358,7 +357,7 @@ impl Cache { None => HashMap::new(), }; - info!("Cached {} orders from database", self.general.len()); + log::info!("Cached {} orders from database", self.general.len()); Ok(()) } @@ -369,14 +368,14 @@ impl Cache { None => HashMap::new(), }; - info!("Cached {} positions from database", self.general.len()); + log::info!("Cached {} positions from database", self.general.len()); Ok(()) } /// Clears the current cache index and re-build. pub fn build_index(&mut self) { self.index.clear(); - debug!("Building index"); + log::debug!("Building index"); // Index accounts for account_id in self.accounts.keys() { @@ -555,7 +554,7 @@ impl Cache { .expect("Time went backwards") .as_micros(); - info!("Checking data integrity"); + log::info!("Checking data integrity"); // Check object caches for account_id in self.accounts.keys() { @@ -564,9 +563,10 @@ impl Cache { .venue_account .contains_key(&account_id.get_issuer()) { - error!( + log::error!( "{} in accounts: {} not found in `self.index.venue_account`", - failure, account_id + failure, + account_id ); error_count += 1; } @@ -574,37 +574,42 @@ impl Cache { for (client_order_id, order) in &self.orders { if !self.index.order_strategy.contains_key(client_order_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.order_strategy`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } if !self.index.orders.contains(client_order_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } if order.is_inflight() && !self.index.orders_inflight.contains(client_order_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.orders_inflight`", - failure, client_order_id + 
failure, + client_order_id ); error_count += 1; } if order.is_open() && !self.index.orders_open.contains(client_order_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.orders_open`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } if order.is_closed() && !self.index.orders_closed.contains(client_order_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.orders_closed`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -614,18 +619,20 @@ impl Cache { .exec_algorithm_orders .contains_key(&exec_algorithm_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.exec_algorithm_orders`", - failure, exec_algorithm_id + failure, + exec_algorithm_id ); error_count += 1; } if order.exec_spawn_id().is_none() && !self.index.exec_spawn_orders.contains_key(client_order_id) { - error!( + log::error!( "{} in orders: {} not found in `self.index.exec_spawn_orders`", - failure, exec_algorithm_id + failure, + exec_algorithm_id ); error_count += 1; } @@ -634,37 +641,42 @@ impl Cache { for (position_id, position) in &self.positions { if !self.index.position_strategy.contains_key(position_id) { - error!( + log::error!( "{} in positions: {} not found in `self.index.position_strategy`", - failure, position_id + failure, + position_id ); error_count += 1; } if !self.index.position_orders.contains_key(position_id) { - error!( + log::error!( "{} in positions: {} not found in `self.index.position_orders`", - failure, position_id + failure, + position_id ); error_count += 1; } if !self.index.positions.contains(position_id) { - error!( + log::error!( "{} in positions: {} not found in `self.index.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } if position.is_open() && !self.index.positions_open.contains(position_id) { - error!( + log::error!( "{} in positions: {} not found in `self.index.positions_open`", - failure, position_id + 
failure, + position_id ); error_count += 1; } if position.is_closed() && !self.index.positions_closed.contains(position_id) { - error!( + log::error!( "{} in positions: {} not found in `self.index.positions_closed`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -673,9 +685,10 @@ impl Cache { // Check indexes for account_id in self.index.venue_account.values() { if !self.accounts.contains_key(account_id) { - error!( + log::error!( "{} in `index.venue_account`: {} not found in `self.accounts`", - failure, account_id + failure, + account_id ); error_count += 1; } @@ -683,9 +696,10 @@ impl Cache { for client_order_id in self.index.venue_order_ids.values() { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.venue_order_ids`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -693,9 +707,10 @@ impl Cache { for client_order_id in self.index.client_order_ids.keys() { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.client_order_ids`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -703,9 +718,10 @@ impl Cache { for client_order_id in self.index.order_position.keys() { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.order_position`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -714,9 +730,10 @@ impl Cache { // Check indexes for client_order_id in self.index.order_strategy.keys() { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.order_strategy`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -724,9 +741,10 @@ impl Cache { for position_id in self.index.position_strategy.keys() { if !self.positions.contains_key(position_id) { - error!( + 
log::error!( "{} in `index.position_strategy`: {} not found in `self.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -734,9 +752,10 @@ impl Cache { for position_id in self.index.position_orders.keys() { if !self.positions.contains_key(position_id) { - error!( + log::error!( "{} in `index.position_orders`: {} not found in `self.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -745,9 +764,10 @@ impl Cache { for (instrument_id, client_order_ids) in &self.index.instrument_orders { for client_order_id in client_order_ids { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.instrument_orders`: {} not found in `self.orders`", - failure, instrument_id + failure, + instrument_id ); error_count += 1; } @@ -756,9 +776,10 @@ impl Cache { for instrument_id in self.index.instrument_positions.keys() { if !self.index.instrument_orders.contains_key(instrument_id) { - error!( + log::error!( "{} in `index.instrument_positions`: {} not found in `index.instrument_orders`", - failure, instrument_id + failure, + instrument_id ); error_count += 1; } @@ -767,9 +788,10 @@ impl Cache { for client_order_ids in self.index.strategy_orders.values() { for client_order_id in client_order_ids { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.strategy_orders`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -779,9 +801,10 @@ impl Cache { for position_ids in self.index.strategy_positions.values() { for position_id in position_ids { if !self.positions.contains_key(position_id) { - error!( + log::error!( "{} in `index.strategy_positions`: {} not found in `self.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -790,9 +813,10 @@ impl Cache { for client_order_id in &self.index.orders { if !self.orders.contains_key(client_order_id) { - error!( + 
log::error!( "{} in `index.orders`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -800,9 +824,10 @@ impl Cache { for client_order_id in &self.index.orders_emulated { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.orders_emulated`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -810,9 +835,10 @@ impl Cache { for client_order_id in &self.index.orders_inflight { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.orders_inflight`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -820,9 +846,10 @@ impl Cache { for client_order_id in &self.index.orders_open { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.orders_open`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -830,9 +857,10 @@ impl Cache { for client_order_id in &self.index.orders_closed { if !self.orders.contains_key(client_order_id) { - error!( + log::error!( "{} in `index.orders_closed`: {} not found in `self.orders`", - failure, client_order_id + failure, + client_order_id ); error_count += 1; } @@ -840,9 +868,10 @@ impl Cache { for position_id in &self.index.positions { if !self.positions.contains_key(position_id) { - error!( + log::error!( "{} in `index.positions`: {} not found in `self.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -850,9 +879,10 @@ impl Cache { for position_id in &self.index.positions_open { if !self.positions.contains_key(position_id) { - error!( + log::error!( "{} in `index.positions_open`: {} not found in `self.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -860,9 +890,10 @@ impl Cache { for position_id in 
&self.index.positions_closed { if !self.positions.contains_key(position_id) { - error!( + log::error!( "{} in `index.positions_closed`: {} not found in `self.positions`", - failure, position_id + failure, + position_id ); error_count += 1; } @@ -870,9 +901,10 @@ impl Cache { for strategy_id in &self.index.strategies { if !self.index.strategy_orders.contains_key(strategy_id) { - error!( + log::error!( "{} in `index.strategies`: {} not found in `index.strategy_orders`", - failure, strategy_id + failure, + strategy_id ); error_count += 1; } @@ -884,9 +916,10 @@ impl Cache { .exec_algorithm_orders .contains_key(exec_algorithm_id) { - error!( + log::error!( "{} in `index.exec_algorithms`: {} not found in `index.exec_algorithm_orders`", - failure, exec_algorithm_id + failure, + exec_algorithm_id ); error_count += 1; } @@ -899,10 +932,10 @@ impl Cache { - timestamp_us; if error_count == 0 { - info!("Integrity check passed in {}μs", total_us); + log::info!("Integrity check passed in {}μs", total_us); true } else { - error!( + log::error!( "Integrity check failed with {} error{} in {}μs", error_count, if error_count == 1 { "" } else { "s" }, @@ -917,20 +950,20 @@ impl Cache { ///'Open state' is considered to be open orders and open positions. #[must_use] pub fn check_residuals(&self) -> bool { - debug!("Checking residuals"); + log::debug!("Checking residuals"); let mut residuals = false; // Check for any open orders for order in self.orders_open(None, None, None, None) { residuals = true; - warn!("Residual {:?}", order); + log::warn!("Residual {:?}", order); } // Check for any open positions for position in self.positions_open(None, None, None, None) { residuals = true; - warn!("Residual {}", position); + log::warn!("Residual {}", position); } residuals @@ -939,14 +972,14 @@ impl Cache { /// Clears the caches index. pub fn clear_index(&mut self) { self.index.clear(); - debug!("Cleared index"); + log::debug!("Cleared index"); } /// Resets the cache. 
/// /// All stateful fields are reset to their initial value. pub fn reset(&mut self) { - debug!("Resetting cache"); + log::debug!("Resetting cache"); self.general.clear(); self.quotes.clear(); @@ -964,7 +997,7 @@ impl Cache { self.clear_index(); - info!("Reset cache"); + log::info!("Reset cache"); } /// Dispose of the cache which will close any underlying database adapter. @@ -991,7 +1024,7 @@ impl Cache { check_valid_string(key, stringify!(key))?; check_predicate_false(value.is_empty(), stringify!(value))?; - debug!("Adding general {key}"); + log::debug!("Adding general {key}"); self.general.insert(key.to_string(), value.clone()); if let Some(database) = &mut self.database { @@ -1002,7 +1035,7 @@ impl Cache { /// Adds the given order `book` to the cache. pub fn add_order_book(&mut self, book: OrderBook) -> anyhow::Result<()> { - debug!("Adding `OrderBook` {}", book.instrument_id); + log::debug!("Adding `OrderBook` {}", book.instrument_id); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1016,7 +1049,7 @@ impl Cache { /// Adds the given `quote` tick to the cache. pub fn add_quote(&mut self, quote: QuoteTick) -> anyhow::Result<()> { - debug!("Adding `QuoteTick` {}", quote.instrument_id); + log::debug!("Adding `QuoteTick` {}", quote.instrument_id); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1037,7 +1070,7 @@ impl Cache { check_slice_not_empty(quotes, stringify!(quotes))?; let instrument_id = quotes[0].instrument_id; - debug!("Adding `QuoteTick`[{}] {}", quotes.len(), instrument_id); + log::debug!("Adding `QuoteTick`[{}] {}", quotes.len(), instrument_id); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1060,7 +1093,7 @@ impl Cache { /// Adds the given `trade` tick to the cache. 
pub fn add_trade(&mut self, trade: TradeTick) -> anyhow::Result<()> { - debug!("Adding `TradeTick` {}", trade.instrument_id); + log::debug!("Adding `TradeTick` {}", trade.instrument_id); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1081,7 +1114,7 @@ impl Cache { check_slice_not_empty(trades, stringify!(trades))?; let instrument_id = trades[0].instrument_id; - debug!("Adding `TradeTick`[{}] {}", trades.len(), instrument_id); + log::debug!("Adding `TradeTick`[{}] {}", trades.len(), instrument_id); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1104,7 +1137,7 @@ impl Cache { /// Adds the given `bar` to the cache. pub fn add_bar(&mut self, bar: Bar) -> anyhow::Result<()> { - debug!("Adding `Bar` {}", bar.bar_type); + log::debug!("Adding `Bar` {}", bar.bar_type); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1125,7 +1158,7 @@ impl Cache { check_slice_not_empty(bars, stringify!(bars))?; let bar_type = bars[0].bar_type; - debug!("Adding `Bar`[{}] {}", bars.len(), bar_type); + log::debug!("Adding `Bar`[{}] {}", bars.len(), bar_type); if self.config.save_market_data { if let Some(database) = &mut self.database { @@ -1148,7 +1181,7 @@ impl Cache { /// Adds the given `currency` to the cache. pub fn add_currency(&mut self, currency: Currency) -> anyhow::Result<()> { - debug!("Adding `Currency` {}", currency.code); + log::debug!("Adding `Currency` {}", currency.code); if let Some(database) = &mut self.database { database.add_currency(¤cy)?; @@ -1160,7 +1193,7 @@ impl Cache { /// Adds the given `instrument` to the cache. 
pub fn add_instrument(&mut self, instrument: InstrumentAny) -> anyhow::Result<()> { - debug!("Adding `Instrument` {}", instrument.id()); + log::debug!("Adding `Instrument` {}", instrument.id()); if let Some(database) = &mut self.database { database.add_instrument(&instrument)?; @@ -1172,7 +1205,7 @@ impl Cache { /// Adds the given `synthetic` instrument to the cache. pub fn add_synthetic(&mut self, synthetic: SyntheticInstrument) -> anyhow::Result<()> { - debug!("Adding `SyntheticInstrument` {}", synthetic.id); + log::debug!("Adding `SyntheticInstrument` {}", synthetic.id); if let Some(database) = &mut self.database { database.add_synthetic(&synthetic)?; @@ -1184,7 +1217,7 @@ impl Cache { /// Adds the given `account` to the cache. pub fn add_account(&mut self, account: AccountAny) -> anyhow::Result<()> { - debug!("Adding `Account` {}", account.id()); + log::debug!("Adding `Account` {}", account.id()); if let Some(database) = &mut self.database { database.add_account(&account)?; @@ -1280,7 +1313,7 @@ impl Cache { )?; }; - debug!("Adding {:?}", order); + log::debug!("Adding {:?}", order); self.index.orders.insert(client_order_id); self.index diff --git a/nautilus_core/common/src/clock.rs b/nautilus_core/common/src/clock.rs index 4d35a84e254b..c70e335af990 100644 --- a/nautilus_core/common/src/clock.rs +++ b/nautilus_core/common/src/clock.rs @@ -23,7 +23,6 @@ use nautilus_core::{ nanos::UnixNanos, time::{get_atomic_clock_realtime, AtomicTime}, }; -use tracing::error; use ustr::Ustr; use crate::{ @@ -444,7 +443,7 @@ impl Clock for LiveClock { None => {} Some(mut timer) => { if let Err(e) = timer.cancel() { - error!("Error on timer cancel: {:?}", e); + log::error!("Error on timer cancel: {:?}", e); } } } @@ -453,7 +452,7 @@ impl Clock for LiveClock { fn cancel_timers(&mut self) { for timer in &mut self.timers.values_mut() { if let Err(e) = timer.cancel() { - error!("Error on timer cancel: {:?}", e); + log::error!("Error on timer cancel: {:?}", e); } } 
self.timers.clear(); diff --git a/nautilus_core/common/src/logging/logger.rs b/nautilus_core/common/src/logging/logger.rs index 2ce1fdf30c83..c8e58031a4e9 100644 --- a/nautilus_core/common/src/logging/logger.rs +++ b/nautilus_core/common/src/logging/logger.rs @@ -513,7 +513,7 @@ impl Drop for LogGuard { mod tests { use std::{collections::HashMap, time::Duration}; - use log::{info, LevelFilter}; + use log::LevelFilter; use nautilus_core::uuid::UUID4; use nautilus_model::identifiers::TraderId; use rstest::*; @@ -602,7 +602,7 @@ mod tests { logging_clock_set_static_mode(); logging_clock_set_static_time(1_650_000_000_000_000); - info!( + log::info!( component = "RiskEngine"; "This is a test." ); @@ -663,7 +663,7 @@ mod tests { logging_clock_set_static_mode(); logging_clock_set_static_time(1_650_000_000_000_000); - info!( + log::info!( component = "RiskEngine"; "This is a test." ); @@ -719,7 +719,7 @@ mod tests { logging_clock_set_static_mode(); logging_clock_set_static_time(1_650_000_000_000_000); - info!( + log::info!( component = "RiskEngine"; "This is a test." 
); diff --git a/nautilus_core/common/src/logging/mod.rs b/nautilus_core/common/src/logging/mod.rs index cfb576cd0d9e..61897aa08bd9 100644 --- a/nautilus_core/common/src/logging/mod.rs +++ b/nautilus_core/common/src/logging/mod.rs @@ -25,7 +25,6 @@ use std::{ use log::LevelFilter; use nautilus_core::{time::get_atomic_clock_static, uuid::UUID4}; use nautilus_model::identifiers::TraderId; -use tracing::error; use tracing_subscriber::EnvFilter; use ustr::Ustr; @@ -107,7 +106,9 @@ pub fn init_tracing() { tracing_subscriber::fmt() .with_env_filter(EnvFilter::new(v.clone())) .try_init() - .unwrap_or_else(|e| error!("Cannot set tracing subscriber because of error: {e}")); + .unwrap_or_else(|e| { + tracing::error!("Cannot set tracing subscriber because of error: {e}") + }); println!("Initialized tracing logs with RUST_LOG={v}"); } } diff --git a/nautilus_core/common/src/logging/writer.rs b/nautilus_core/common/src/logging/writer.rs index 68501bb635c3..ec14a9ca39bd 100644 --- a/nautilus_core/common/src/logging/writer.rs +++ b/nautilus_core/common/src/logging/writer.rs @@ -21,7 +21,6 @@ use std::{ use chrono::{DateTime, Utc}; use log::LevelFilter; -use tracing::error; use crate::logging::logger::LogLine; @@ -162,7 +161,7 @@ impl FileWriter { Some(ref format) if format == "json" => true, None => false, Some(ref unrecognized) => { - error!( + tracing::error!( "Unrecognized log file format: {unrecognized}. Using plain text format as default." 
); false @@ -187,7 +186,7 @@ impl FileWriter { level: fileout_level, }), Err(e) => { - error!("Error creating log file: {}", e); + tracing::error!("Error creating log file: {}", e); None } } @@ -259,20 +258,20 @@ impl LogWriter for FileWriter { self.buf = BufWriter::new(file); self.path = file_path; } - Err(e) => error!("Error creating log file: {}", e), + Err(e) => tracing::error!("Error creating log file: {}", e), } } match self.buf.write_all(line.as_bytes()) { Ok(()) => {} - Err(e) => error!("Error writing to file: {e:?}"), + Err(e) => tracing::error!("Error writing to file: {e:?}"), } } fn flush(&mut self) { match self.buf.flush() { Ok(()) => {} - Err(e) => error!("Error flushing file: {e:?}"), + Err(e) => tracing::error!("Error flushing file: {e:?}"), } } diff --git a/nautilus_core/common/src/msgbus/mod.rs b/nautilus_core/common/src/msgbus/mod.rs index a4aae55e24f1..5a2948333974 100644 --- a/nautilus_core/common/src/msgbus/mod.rs +++ b/nautilus_core/common/src/msgbus/mod.rs @@ -27,7 +27,6 @@ use std::{ }; use indexmap::IndexMap; -use log::error; use nautilus_core::uuid::UUID4; use nautilus_model::{ data::Data, @@ -326,7 +325,7 @@ impl MessageBus { let sub = Subscription::new(topic, handler, priority); if self.subscriptions.contains_key(&sub) { - error!("{sub:?} already exists."); + log::error!("{sub:?} already exists."); return; } diff --git a/nautilus_core/common/src/timer.rs b/nautilus_core/common/src/timer.rs index f3aaf84a3689..fcf76f9c1185 100644 --- a/nautilus_core/common/src/timer.rs +++ b/nautilus_core/common/src/timer.rs @@ -36,7 +36,6 @@ use tokio::{ sync::oneshot, time::{Duration, Instant}, }; -use tracing::{debug, error, trace}; use ustr::Ustr; use crate::{handlers::EventHandler, runtime::get_runtime}; @@ -247,7 +246,7 @@ impl LiveTimer { // SAFETY: Guaranteed to be non-zero let interval_ns = NonZeroU64::new(std::cmp::max(interval_ns, 1)).unwrap(); - debug!("Creating timer '{}'", name); + log::debug!("Creating timer '{}'", name); Ok(Self { name: 
Ustr::from(name), interval_ns, @@ -325,7 +324,7 @@ impl LiveTimer { } }, _ = (&mut cancel_rx) => { - trace!("Received timer cancel"); + tracing::trace!("Received timer cancel"); break; // Timer canceled }, } @@ -339,7 +338,7 @@ impl LiveTimer { /// Cancels the timer (the timer will not generate a final event). pub fn cancel(&mut self) -> anyhow::Result<()> { - debug!("Cancel timer '{}'", self.name); + log::debug!("Cancel timer '{}'", self.name); if !self.is_expired.load(atomic::Ordering::SeqCst) { if let Some(sender) = self.canceler.take() { // Send cancellation signal @@ -366,7 +365,7 @@ fn call_python_with_time_event( match handler.callback.call1(py, (capsule,)) { Ok(_) => {} - Err(e) => error!("Error on callback: {:?}", e), + Err(e) => tracing::error!("Error on callback: {:?}", e), }; }); } diff --git a/nautilus_core/data/src/engine/mod.rs b/nautilus_core/data/src/engine/mod.rs index a02431aea6ff..00dc8096284a 100644 --- a/nautilus_core/data/src/engine/mod.rs +++ b/nautilus_core/data/src/engine/mod.rs @@ -31,7 +31,6 @@ use std::{ sync::Arc, }; -use log; use nautilus_common::{ cache::Cache, client::DataClientAdapter, diff --git a/nautilus_core/execution/src/engine.rs b/nautilus_core/execution/src/engine.rs index 47a01e1fc2f0..46a1baec84d9 100644 --- a/nautilus_core/execution/src/engine.rs +++ b/nautilus_core/execution/src/engine.rs @@ -25,7 +25,6 @@ use std::{ rc::Rc, }; -use log::debug; use nautilus_common::{ cache::Cache, clock::Clock, generators::position_id::PositionIdGenerator, msgbus::MessageBus, }; @@ -150,7 +149,7 @@ where // -- COMMAND HANDLERS ---------------------------------------------------- fn execute_command(&self, command: TradingCommand) { - debug!("<--[CMD] {:?}", command); // TODO: Log constants + log::debug!("<--[CMD] {:?}", command); // TODO: Log constants // TODO: Refine getting the client (no need for two expects) let client = if let Some(client) = self.clients.get(&command.client_id()) { diff --git 
a/nautilus_core/infrastructure/src/python/redis/msgbus.rs b/nautilus_core/infrastructure/src/python/redis/msgbus.rs index 81cb1a7e2ee8..238c8b5c5e52 100644 --- a/nautilus_core/infrastructure/src/python/redis/msgbus.rs +++ b/nautilus_core/infrastructure/src/python/redis/msgbus.rs @@ -24,7 +24,6 @@ use nautilus_core::{ }; use nautilus_model::identifiers::TraderId; use pyo3::{prelude::*, types::PyBytes}; -use tracing::error; use crate::redis::msgbus::RedisMessageBusDatabase; @@ -69,7 +68,7 @@ impl RedisMessageBusDatabase { fn call_python(py: Python, callback: &PyObject, py_obj: PyObject) -> PyResult<()> { callback.call1(py, (py_obj,)).map_err(|e| { - error!("Error calling Python: {e}"); + tracing::error!("Error calling Python: {e}"); e })?; Ok(()) diff --git a/nautilus_core/infrastructure/src/redis/cache.rs b/nautilus_core/infrastructure/src/redis/cache.rs index 9097d51e5b23..452709bb2231 100644 --- a/nautilus_core/infrastructure/src/redis/cache.rs +++ b/nautilus_core/infrastructure/src/redis/cache.rs @@ -41,7 +41,6 @@ use nautilus_model::{ types::currency::Currency, }; use redis::{Commands, Connection, Pipeline}; -use tracing::{debug, error}; use ustr::Ustr; use super::{REDIS_DELIMITER, REDIS_FLUSHDB}; @@ -169,13 +168,13 @@ impl RedisCacheDatabase { } pub fn close(&mut self) -> anyhow::Result<()> { - debug!("Closing cache database adapter"); + tracing::debug!("Closing cache database adapter"); self.tx .send(DatabaseCommand::close()) .map_err(anyhow::Error::new)?; if let Some(handle) = self.handle.take() { - debug!("Joining '{CACHE_WRITE}' thread"); + tracing::debug!("Joining '{CACHE_WRITE}' thread"); handle.join().map_err(|e| anyhow::anyhow!("{:?}", e)) } else { Err(anyhow::anyhow!("Cache database already shutdown")) @@ -191,7 +190,7 @@ impl RedisCacheDatabase { pub fn keys(&mut self, pattern: &str) -> anyhow::Result> { let pattern = format!("{}{REDIS_DELIMITER}{}", self.trader_key, pattern); - debug!("Querying keys: {pattern}"); + tracing::debug!("Querying keys: 
{pattern}"); match self.con.keys(pattern) { Ok(keys) => Ok(keys), Err(e) => Err(e.into()), @@ -287,7 +286,7 @@ fn drain_buffer(conn: &mut Connection, trader_key: &str, buffer: &mut VecDeque collection, Err(e) => { - error!("{e}"); + tracing::error!("{e}"); continue; // Continue to next message } }; @@ -298,25 +297,25 @@ fn drain_buffer(conn: &mut Connection, trader_key: &str, buffer: &mut VecDeque { if let Some(payload) = msg.payload { if let Err(e) = insert(&mut pipe, collection, &key, payload) { - error!("{e}"); + tracing::error!("{e}"); } } else { - error!("Null `payload` for `insert`"); + tracing::error!("Null `payload` for `insert`"); } } DatabaseOperation::Update => { if let Some(payload) = msg.payload { if let Err(e) = update(&mut pipe, collection, &key, payload) { - error!("{e}"); + tracing::error!("{e}"); } } else { - error!("Null `payload` for `update`"); + tracing::error!("Null `payload` for `update`"); }; } DatabaseOperation::Delete => { // `payload` can be `None` for a delete operation if let Err(e) = delete(&mut pipe, collection, &key, msg.payload) { - error!("{e}"); + tracing::error!("{e}"); } } DatabaseOperation::Close => panic!("Close command should not be drained"), @@ -324,7 +323,7 @@ fn drain_buffer(conn: &mut Connection, trader_key: &str, buffer: &mut VecDeque(conn) { - error!("{e}"); + tracing::error!("{e}"); } } @@ -679,7 +678,7 @@ impl CacheDatabaseAdapter for RedisCacheDatabaseAdapter { currencies.insert(currency_code, currency); } None => { - error!("Currency not found: {currency_code}"); + tracing::error!("Currency not found: {currency_code}"); } } } @@ -698,7 +697,7 @@ impl CacheDatabaseAdapter for RedisCacheDatabaseAdapter { instruments.insert(instrument_id, instrument); } None => { - error!("Instrument not found: {instrument_id}"); + tracing::error!("Instrument not found: {instrument_id}"); } } } @@ -731,7 +730,7 @@ impl CacheDatabaseAdapter for RedisCacheDatabaseAdapter { accounts.insert(account_id, account); } None => { - 
error!("Account not found: {account_id}"); + tracing::error!("Account not found: {account_id}"); } } } @@ -751,7 +750,7 @@ impl CacheDatabaseAdapter for RedisCacheDatabaseAdapter { orders.insert(client_order_id, order); } None => { - error!("Order not found: {client_order_id}"); + tracing::error!("Order not found: {client_order_id}"); } } } diff --git a/nautilus_core/infrastructure/src/redis/mod.rs b/nautilus_core/infrastructure/src/redis/mod.rs index f60f149af992..31fae989e87c 100644 --- a/nautilus_core/infrastructure/src/redis/mod.rs +++ b/nautilus_core/infrastructure/src/redis/mod.rs @@ -25,7 +25,6 @@ use nautilus_core::uuid::UUID4; use nautilus_model::identifiers::TraderId; use redis::*; use semver::Version; -use tracing::{debug, info}; const REDIS_MIN_VERSION: &str = "6.2.0"; const REDIS_DELIMITER: char = ':'; @@ -83,9 +82,9 @@ pub fn create_redis_connection( con_name: &str, config: DatabaseConfig, ) -> anyhow::Result { - debug!("Creating {con_name} redis connection"); + tracing::debug!("Creating {con_name} redis connection"); let (redis_url, redacted_url) = get_redis_url(config.clone()); - debug!("Connecting to {redacted_url}"); + tracing::debug!("Connecting to {redacted_url}"); let timeout = Duration::from_secs(config.timeout as u64); let client = redis::Client::open(redis_url)?; let mut con = client.get_connection_with_timeout(timeout)?; @@ -96,7 +95,7 @@ pub fn create_redis_connection( let min_version = Version::parse(REDIS_MIN_VERSION)?; if version >= min_version { - info!(con_msg); + tracing::info!(con_msg); } else { // TODO: Using `log` error here so that the message is displayed regardless of whether // the logging config has pyo3 enabled. Later we can standardize this to `tracing`. 
diff --git a/nautilus_core/infrastructure/src/redis/msgbus.rs b/nautilus_core/infrastructure/src/redis/msgbus.rs index 92a58e002847..27dad72ce923 100644 --- a/nautilus_core/infrastructure/src/redis/msgbus.rs +++ b/nautilus_core/infrastructure/src/redis/msgbus.rs @@ -37,7 +37,6 @@ use nautilus_core::{ use nautilus_model::identifiers::TraderId; use redis::*; use streams::StreamReadOptions; -use tracing::{debug, error}; use super::{REDIS_MINID, REDIS_XTRIM}; use crate::redis::{create_redis_connection, get_stream_key}; @@ -128,13 +127,13 @@ impl MessageBusDatabaseAdapter for RedisMessageBusDatabase { if let Err(e) = self.pub_tx.send(msg) { // This will occur for now when the Python task // blindly attempts to publish to a closed channel. - debug!("Failed to send message: {}", e); + tracing::debug!("Failed to send message: {}", e); } Ok(()) } fn close(&mut self) -> anyhow::Result<()> { - debug!("Closing message bus database adapter"); + tracing::debug!("Closing message bus database adapter"); self.stream_signal.store(true, Ordering::Relaxed); @@ -143,20 +142,20 @@ impl MessageBusDatabaseAdapter for RedisMessageBusDatabase { payload: Bytes::new(), // Empty }; if let Err(e) = self.pub_tx.send(msg) { - error!("Failed to send close message: {:?}", e); + tracing::error!("Failed to send close message: {:?}", e); } if let Some(handle) = self.pub_handle.take() { - debug!("Joining '{MSGBUS_PUBLISH}' thread"); + tracing::debug!("Joining '{MSGBUS_PUBLISH}' thread"); if let Err(e) = handle.join().map_err(|e| anyhow::anyhow!("{:?}", e)) { - error!("Error joining '{MSGBUS_PUBLISH}' thread: {:?}", e); + tracing::error!("Error joining '{MSGBUS_PUBLISH}' thread: {:?}", e); } } if let Some(handle) = self.stream_handle.take() { - debug!("Joining '{MSGBUS_STREAM}' thread"); + tracing::debug!("Joining '{MSGBUS_STREAM}' thread"); if let Err(e) = handle.join().map_err(|e| anyhow::anyhow!("{:?}", e)) { - error!("Error joining '{MSGBUS_STREAM}' thread: {:?}", e); + tracing::error!("Error 
joining '{MSGBUS_STREAM}' thread: {:?}", e); } } Ok(()) @@ -189,7 +188,7 @@ pub fn publish_messages( instance_id: UUID4, config: MessageBusConfig, ) -> anyhow::Result<()> { - debug!("Starting message publishing"); + tracing::debug!("Starting message publishing"); let db_config = config .database .as_ref() @@ -295,7 +294,7 @@ fn drain_buffer( .query(conn); if let Err(e) = result { - error!("Error trimming stream '{stream_key}': {e}"); + tracing::error!("Error trimming stream '{stream_key}': {e}"); } else { last_trim_index.insert( stream_key.to_string(), @@ -315,7 +314,7 @@ pub fn stream_messages( stream_keys: Vec, stream_signal: Arc, ) -> anyhow::Result<()> { - debug!("Starting message streaming"); + tracing::debug!("Starting message streaming"); let mut con = create_redis_connection(MSGBUS_STREAM, config)?; let stream_keys = &stream_keys @@ -323,7 +322,7 @@ pub fn stream_messages( .map(String::as_str) .collect::>(); - debug!("Listening to streams: [{}]", stream_keys.join(", ")); + tracing::debug!("Listening to streams: [{}]", stream_keys.join(", ")); // Start streaming from current timestamp let clock = get_atomic_clock_realtime(); @@ -334,7 +333,7 @@ pub fn stream_messages( 'outer: loop { if stream_signal.load(Ordering::Relaxed) { - debug!("Received terminate signal"); + tracing::debug!("Received terminate signal"); break; } let result: Result = @@ -354,12 +353,12 @@ pub fn stream_messages( match decode_bus_message(array) { Ok(msg) => { if tx.blocking_send(msg).is_err() { - debug!("Channel closed"); + tracing::debug!("Channel closed"); break 'outer; // End streaming } } Err(e) => { - error!("{:?}", e); + tracing::error!("{:?}", e); continue; } } @@ -373,7 +372,7 @@ pub fn stream_messages( } } } - debug!("Completed message streaming"); + tracing::debug!("Completed message streaming"); Ok(()) } diff --git a/nautilus_core/infrastructure/src/sql/cache_database.rs b/nautilus_core/infrastructure/src/sql/cache_database.rs index 0c8776044456..6b366cbd67a4 100644 --- 
a/nautilus_core/infrastructure/src/sql/cache_database.rs +++ b/nautilus_core/infrastructure/src/sql/cache_database.rs @@ -19,7 +19,6 @@ use std::{ }; use bytes::Bytes; -use log::error; use nautilus_common::cache::database::CacheDatabaseAdapter; use nautilus_core::nanos::UnixNanos; use nautilus_model::{ @@ -330,13 +329,13 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { .map(|currency| (currency.code, currency)) .collect(); if let Err(e) = tx.send(mapping) { - error!("Failed to send currencies: {:?}", e); + log::error!("Failed to send currencies: {:?}", e); } } Err(e) => { - error!("Failed to load currencies: {:?}", e); + log::error!("Failed to load currencies: {:?}", e); if let Err(e) = tx.send(HashMap::new()) { - error!("Failed to send empty currencies: {:?}", e); + log::error!("Failed to send empty currencies: {:?}", e); } } } @@ -356,13 +355,13 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { .map(|instrument| (instrument.id(), instrument)) .collect(); if let Err(e) = tx.send(mapping) { - error!("Failed to send instruments: {:?}", e); + log::error!("Failed to send instruments: {:?}", e); } } Err(e) => { - error!("Failed to load instruments: {:?}", e); + log::error!("Failed to load instruments: {:?}", e); if let Err(e) = tx.send(HashMap::new()) { - error!("Failed to send empty instruments: {:?}", e); + log::error!("Failed to send empty instruments: {:?}", e); } } } @@ -386,13 +385,13 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { .map(|account| (account.id(), account)) .collect(); if let Err(e) = tx.send(mapping) { - error!("Failed to send accounts: {:?}", e); + log::error!("Failed to send accounts: {:?}", e); } } Err(e) => { - error!("Failed to load accounts: {:?}", e); + log::error!("Failed to load accounts: {:?}", e); if let Err(e) = tx.send(HashMap::new()) { - error!("Failed to send empty accounts: {:?}", e); + log::error!("Failed to send empty accounts: {:?}", e); } } } @@ -412,13 +411,13 @@ impl CacheDatabaseAdapter for 
PostgresCacheDatabase { .map(|order| (order.client_order_id(), order)) .collect(); if let Err(e) = tx.send(mapping) { - error!("Failed to send orders: {:?}", e); + log::error!("Failed to send orders: {:?}", e); } } Err(e) => { - error!("Failed to load orders: {:?}", e); + log::error!("Failed to load orders: {:?}", e); if let Err(e) = tx.send(HashMap::new()) { - error!("Failed to send empty orders: {:?}", e); + log::error!("Failed to send empty orders: {:?}", e); } } } @@ -447,13 +446,13 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(currency) => { if let Err(e) = tx.send(currency) { - error!("Failed to send currency {}: {:?}", code, e); + log::error!("Failed to send currency {}: {:?}", code, e); } } Err(e) => { - error!("Failed to load currency {}: {:?}", code, e); + log::error!("Failed to load currency {}: {:?}", code, e); if let Err(e) = tx.send(None) { - error!("Failed to send None for currency {}: {:?}", code, e); + log::error!("Failed to send None for currency {}: {:?}", code, e); } } } @@ -474,15 +473,16 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(instrument) => { if let Err(e) = tx.send(instrument) { - error!("Failed to send instrument {}: {:?}", instrument_id, e); + log::error!("Failed to send instrument {}: {:?}", instrument_id, e); } } Err(e) => { - error!("Failed to load instrument {}: {:?}", instrument_id, e); + log::error!("Failed to load instrument {}: {:?}", instrument_id, e); if let Err(e) = tx.send(None) { - error!( + log::error!( "Failed to send None for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); } } @@ -507,13 +507,13 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(account) => { if let Err(e) = tx.send(account) { - error!("Failed to send account {}: {:?}", account_id, e); + log::error!("Failed to send account {}: {:?}", account_id, e); } } Err(e) => { - error!("Failed to load account {}: {:?}", account_id, e); + log::error!("Failed to load 
account {}: {:?}", account_id, e); if let Err(e) = tx.send(None) { - error!("Failed to send None for account {}: {:?}", account_id, e); + log::error!("Failed to send None for account {}: {:?}", account_id, e); } } } @@ -530,11 +530,11 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(order) => { if let Err(e) = tx.send(order) { - error!("Failed to send order {}: {:?}", client_order_id, e); + log::error!("Failed to send order {}: {:?}", client_order_id, e); } } Err(e) => { - error!("Failed to load order {}: {:?}", client_order_id, e); + log::error!("Failed to load order {}: {:?}", client_order_id, e); let _ = tx.send(None); } } @@ -630,21 +630,24 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(quotes) => { if let Err(er) = tx.send(quotes) { - error!( + log::error!( "Failed to send quotes for instrument {}: {:?}", - instrument_id, er + instrument_id, + er ); } } Err(e) => { - error!( + log::error!( "Failed to load quotes for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); if let Err(e) = tx.send(Vec::new()) { - error!( + log::error!( "Failed to send empty quotes for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); } } @@ -669,21 +672,24 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(trades) => { if let Err(e) = tx.send(trades) { - error!( + log::error!( "Failed to send trades for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); } } Err(e) => { - error!( + log::error!( "Failed to load trades for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); if let Err(e) = tx.send(Vec::new()) { - error!( + log::error!( "Failed to send empty trades for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); } } @@ -708,21 +714,24 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { match result { Ok(bars) => { if let Err(e) = tx.send(bars) { - error!( + log::error!( "Failed to send bars for instrument {}: {:?}", - 
instrument_id, e + instrument_id, + e ); } } Err(e) => { - error!( + log::error!( "Failed to load bars for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); if let Err(e) = tx.send(Vec::new()) { - error!( + log::error!( "Failed to send empty bars for instrument {}: {:?}", - instrument_id, e + instrument_id, + e ); } } diff --git a/nautilus_core/infrastructure/src/sql/pg.rs b/nautilus_core/infrastructure/src/sql/pg.rs index f62e1d13750c..e30ba8af8209 100644 --- a/nautilus_core/infrastructure/src/sql/pg.rs +++ b/nautilus_core/infrastructure/src/sql/pg.rs @@ -15,7 +15,6 @@ use derive_builder::Builder; use sqlx::{postgres::PgConnectOptions, query, ConnectOptions, PgPool}; -use tracing::log::{error, info}; use crate::sql::NAUTILUS_TABLES; @@ -151,15 +150,15 @@ pub async fn init_postgres( password: String, schema_dir: Option, ) -> anyhow::Result<()> { - info!("Initializing Postgres database with target permissions and schema"); + tracing::info!("Initializing Postgres database with target permissions and schema"); // Create public schema match sqlx::query("CREATE SCHEMA IF NOT EXISTS public;") .execute(pg) .await { - Ok(_) => info!("Schema public created successfully"), - Err(e) => error!("Error creating schema public: {:?}", e), + Ok(_) => tracing::info!("Schema public created successfully"), + Err(e) => tracing::error!("Error creating schema public: {:?}", e), } // Create role if not exists @@ -167,12 +166,12 @@ pub async fn init_postgres( .execute(pg) .await { - Ok(_) => info!("Role {} created successfully", database), + Ok(_) => tracing::info!("Role {} created successfully", database), Err(e) => { if e.to_string().contains("already exists") { - info!("Role {} already exists", database); + tracing::info!("Role {} already exists", database); } else { - error!("Error creating role {}: {:?}", database, e); + tracing::error!("Error creating role {}: {:?}", database, e); } } } @@ -183,7 +182,7 @@ pub async fn init_postgres( 
std::fs::read_dir(schema_dir)?.collect::, std::io::Error>>()?; for file in &mut sql_files { let file_name = file.file_name(); - info!("Executing schema file: {:?}", file_name); + tracing::info!("Executing schema file: {:?}", file_name); let file_path = file.path(); let sql_content = std::fs::read_to_string(file_path.clone())?; for sql_statement in sql_content.split(';').filter(|s| !s.trim().is_empty()) { @@ -192,7 +191,7 @@ pub async fn init_postgres( .await .map_err(|err| { if err.to_string().contains("already exists") { - info!("Already exists error on statement, skipping"); + tracing::info!("Already exists error on statement, skipping"); } else { panic!( "Error executing statement {} with error: {:?}", @@ -209,10 +208,11 @@ pub async fn init_postgres( .execute(pg) .await { - Ok(_) => info!("Connect privileges granted to role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("Connect privileges granted to role {}", database), + Err(e) => tracing::error!( "Error granting connect privileges to role {}: {:?}", - database, e + database, + e ), } @@ -221,10 +221,11 @@ pub async fn init_postgres( .execute(pg) .await { - Ok(_) => info!("All schema privileges granted to role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("All schema privileges granted to role {}", database), + Err(e) => tracing::error!( "Error granting all privileges to role {}: {:?}", - database, e + database, + e ), } @@ -239,10 +240,11 @@ pub async fn init_postgres( .execute(pg) .await { - Ok(_) => info!("All tables privileges granted to role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("All tables privileges granted to role {}", database), + Err(e) => tracing::error!( "Error granting all privileges to role {}: {:?}", - database, e + database, + e ), } @@ -257,10 +259,11 @@ pub async fn init_postgres( .execute(pg) .await { - Ok(_) => info!("All sequences privileges granted to role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("All sequences 
privileges granted to role {}", database), + Err(e) => tracing::error!( "Error granting all privileges to role {}: {:?}", - database, e + database, + e ), } @@ -275,10 +278,11 @@ pub async fn init_postgres( .execute(pg) .await { - Ok(_) => info!("All functions privileges granted to role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("All functions privileges granted to role {}", database), + Err(e) => tracing::error!( "Error granting all privileges to role {}: {:?}", - database, e + database, + e ), } @@ -291,8 +295,8 @@ pub async fn drop_postgres(pg: &PgPool, database: String) -> anyhow::Result<()> .execute(pg) .await { - Ok(_) => info!("Dropped owned objects by role {}", database), - Err(e) => error!("Error dropping owned by role {}: {:?}", database, e), + Ok(_) => tracing::info!("Dropped owned objects by role {}", database), + Err(e) => tracing::error!("Error dropping owned by role {}: {:?}", database, e), } // Revoke connect @@ -300,10 +304,11 @@ pub async fn drop_postgres(pg: &PgPool, database: String) -> anyhow::Result<()> .execute(pg) .await { - Ok(_) => info!("Revoked connect privileges from role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("Revoked connect privileges from role {}", database), + Err(e) => tracing::error!( "Error revoking connect privileges from role {}: {:?}", - database, e + database, + e ), } @@ -312,10 +317,11 @@ pub async fn drop_postgres(pg: &PgPool, database: String) -> anyhow::Result<()> .execute(pg) .await { - Ok(_) => info!("Revoked all privileges from role {}", database), - Err(e) => error!( + Ok(_) => tracing::info!("Revoked all privileges from role {}", database), + Err(e) => tracing::error!( "Error revoking all privileges from role {}: {:?}", - database, e + database, + e ), } @@ -324,8 +330,8 @@ pub async fn drop_postgres(pg: &PgPool, database: String) -> anyhow::Result<()> .execute(pg) .await { - Ok(_) => info!("Dropped schema public"), - Err(e) => error!("Error dropping schema public: {:?}", 
e), + Ok(_) => tracing::info!("Dropped schema public"), + Err(e) => tracing::error!("Error dropping schema public: {:?}", e), } // Drop role @@ -333,8 +339,8 @@ pub async fn drop_postgres(pg: &PgPool, database: String) -> anyhow::Result<()> .execute(pg) .await { - Ok(_) => info!("Dropped role {}", database), - Err(e) => error!("Error dropping role {}: {:?}", database, e), + Ok(_) => tracing::info!("Dropped role {}", database), + Err(e) => tracing::error!("Error dropping role {}: {:?}", database, e), } Ok(()) } diff --git a/nautilus_core/network/src/http.rs b/nautilus_core/network/src/http.rs index 18a198919528..dc0c168ad696 100644 --- a/nautilus_core/network/src/http.rs +++ b/nautilus_core/network/src/http.rs @@ -27,7 +27,6 @@ use reqwest::{ header::{HeaderMap, HeaderName}, Method, Response, Url, }; -use tracing::trace; use crate::ratelimiter::{clock::MonotonicClock, quota::Quota, RateLimiter}; @@ -68,7 +67,7 @@ impl InnerHttpClient { None => request_builder.build()?, }; - trace!("{request:?}"); + tracing::trace!("{request:?}"); let response = self.client.execute(request).await?; self.to_response(response).await @@ -78,7 +77,7 @@ impl InnerHttpClient { &self, response: Response, ) -> Result> { - trace!("{response:?}"); + tracing::trace!("{response:?}"); let headers: HashMap = self .header_keys diff --git a/nautilus_core/network/src/socket.rs b/nautilus_core/network/src/socket.rs index a46117fd649b..e19384f534b2 100644 --- a/nautilus_core/network/src/socket.rs +++ b/nautilus_core/network/src/socket.rs @@ -37,7 +37,6 @@ use tokio_tungstenite::{ tungstenite::{client::IntoClientRequest, stream::Mode, Error}, MaybeTlsStream, }; -use tracing::{debug, error}; type TcpWriter = WriteHalf>; type SharedTcpWriter = Arc>>>; @@ -140,9 +139,9 @@ impl SocketClientInner { url: &str, mode: Mode, ) -> Result<(TcpReader, TcpWriter), Error> { - debug!("Connecting to server"); + tracing::debug!("Connecting to server"); let stream = TcpStream::connect(url).await?; - debug!("Making TLS 
connection"); + tracing::debug!("Making TLS connection"); let request = url.into_client_request()?; tcp_tls(&request, mode, stream, None).await.map(split) } @@ -161,16 +160,16 @@ impl SocketClientInner { match reader.read_buf(&mut buf).await { // Connection has been terminated or vector buffer is completely Ok(0) => { - error!("Cannot read anymore bytes"); + tracing::error!("Cannot read anymore bytes"); break; } Err(e) => { - error!("Failed with error: {e}"); + tracing::error!("Failed with error: {e}"); break; } // Received bytes of data Ok(bytes) => { - debug!("Received {bytes} bytes of data"); + tracing::debug!("Received {bytes} bytes of data"); // While received data has a line break // drain it and pass it to the handler @@ -185,7 +184,7 @@ impl SocketClientInner { if let Err(e) = Python::with_gil(|py| handler.call1(py, (data.as_slice(),))) { - error!("Call to handler failed: {e}"); + tracing::error!("Call to handler failed: {e}"); break; } } @@ -207,11 +206,11 @@ impl SocketClientInner { message.extend(suffix); loop { sleep(duration).await; - debug!("Sending heartbeat"); + tracing::debug!("Sending heartbeat"); let mut guard = writer.lock().await; match guard.write_all(&message).await { - Ok(()) => debug!("Sent heartbeat"), - Err(e) => error!("Failed to send heartbeat: {e}"), + Ok(()) => tracing::debug!("Sent heartbeat"), + Err(e) => tracing::error!("Failed to send heartbeat: {e}"), } } }) @@ -225,7 +224,7 @@ impl SocketClientInner { /// Closing the connection is an async call which cannot be done by the /// drop method so it must be done explicitly. 
pub async fn shutdown(&mut self) -> Result<(), std::io::Error> { - debug!("Abort read task"); + tracing::debug!("Abort read task"); if !self.read_task.is_finished() { self.read_task.abort(); } @@ -233,12 +232,12 @@ impl SocketClientInner { // Cancel heart beat task if let Some(ref handle) = self.heartbeat_task.take() { if !handle.is_finished() { - debug!("Abort heartbeat task"); + tracing::debug!("Abort heartbeat task"); handle.abort(); } } - debug!("Shutdown writer"); + tracing::debug!("Shutdown writer"); let mut writer = self.writer.lock().await; writer.shutdown().await } @@ -257,15 +256,15 @@ impl SocketClientInner { suffix, handler, } = &self.config; - debug!("Reconnecting client"); + tracing::debug!("Reconnecting client"); let (reader, new_writer) = Self::tls_connect_with_server(url, *mode).await?; - debug!("Use new writer end"); + tracing::debug!("Use new writer end"); let mut guard = self.writer.lock().await; *guard = new_writer; drop(guard); - debug!("Recreate reader and heartbeat task"); + tracing::debug!("Recreate reader and heartbeat task"); self.read_task = Self::spawn_read_task(reader, handler.clone(), suffix.clone()); self.heartbeat_task = Self::spawn_heartbeat_task(heartbeat.clone(), self.writer.clone(), suffix.clone()); @@ -332,8 +331,8 @@ impl SocketClient { if let Some(handler) = post_connection { Python::with_gil(|py| match handler.call0(py) { - Ok(_) => debug!("Called `post_connection` handler"), - Err(e) => error!("Error calling `post_connection` handler: {e}"), + Ok(_) => tracing::debug!("Called `post_connection` handler"), + Err(e) => tracing::error!("Error calling `post_connection` handler: {e}"), }); } @@ -360,10 +359,10 @@ impl SocketClient { .await { Ok(_) => { - debug!("Controller task finished"); + tracing::debug!("Controller task finished"); } Err(_) => { - error!("Timeout waiting for controller task to finish"); + tracing::error!("Timeout waiting for controller task to finish"); } } } @@ -394,33 +393,37 @@ impl SocketClient { match 
(disconnected, inner.is_alive()) { (false, false) => match inner.reconnect().await { Ok(()) => { - debug!("Reconnected successfully"); + tracing::debug!("Reconnected successfully"); if let Some(ref handler) = post_reconnection { Python::with_gil(|py| match handler.call0(py) { - Ok(_) => debug!("Called `post_reconnection` handler"), + Ok(_) => tracing::debug!("Called `post_reconnection` handler"), Err(e) => { - error!("Error calling `post_reconnection` handler: {e}"); + tracing::error!( + "Error calling `post_reconnection` handler: {e}" + ); } }); } } Err(e) => { - error!("Reconnect failed {e}"); + tracing::error!("Reconnect failed {e}"); break; } }, (true, true) => { - debug!("Shutting down inner client"); + tracing::debug!("Shutting down inner client"); match inner.shutdown().await { - Ok(()) => debug!("Closed connection"), - Err(e) => error!("Error on `shutdown`: {e}"), + Ok(()) => tracing::debug!("Closed connection"), + Err(e) => tracing::error!("Error on `shutdown`: {e}"), } if let Some(ref handler) = post_disconnection { Python::with_gil(|py| match handler.call0(py) { - Ok(_) => debug!("Called `post_disconnection` handler"), + Ok(_) => tracing::debug!("Called `post_disconnection` handler"), Err(e) => { - error!("Error calling `post_disconnection` handler: {e}"); + tracing::error!( + "Error calling `post_disconnection` handler: {e}" + ); } }); } @@ -530,7 +533,6 @@ mod tests { time::{sleep, Duration}, }; use tokio_tungstenite::tungstenite::stream::Mode; - use tracing::debug; use tracing_test::traced_test; use crate::socket::{SocketClient, SocketConfig}; @@ -556,7 +558,7 @@ mod tests { // keep listening for new connections loop { let (mut stream, _) = server.accept().await.unwrap(); - debug!("socket:test Server accepted connection"); + tracing::debug!("socket:test Server accepted connection"); // keep receiving messages from connection // and sending them back as it is @@ -566,7 +568,7 @@ mod tests { let mut buf = Vec::new(); loop { let bytes = 
stream.read_buf(&mut buf).await.unwrap(); - debug!("socket:test Server received {bytes} bytes"); + tracing::debug!("socket:test Server received {bytes} bytes"); // Terminate if 0 bytes have been read // Connection has been terminated or vector buffer is completely @@ -580,10 +582,10 @@ mod tests { { let close_message = b"close".as_slice(); if &buf[0..*i] == close_message { - debug!("socket:test Client sent closing message"); + tracing::debug!("socket:test Client sent closing message"); return; } else { - debug!("socket:test Server sending message"); + tracing::debug!("socket:test Server sending message"); stream .write_all(buf.drain(0..i + 2).as_slice()) .await diff --git a/nautilus_core/network/src/websocket.rs b/nautilus_core/network/src/websocket.rs index 4334a27fb5b1..32eca98e0a6f 100644 --- a/nautilus_core/network/src/websocket.rs +++ b/nautilus_core/network/src/websocket.rs @@ -35,7 +35,6 @@ use tokio_tungstenite::{ tungstenite::{client::IntoClientRequest, http::HeaderValue, Error, Message}, MaybeTlsStream, WebSocketStream, }; -use tracing::{debug, error}; type MessageWriter = SplitSink>, Message>; type SharedMessageWriter = @@ -155,7 +154,7 @@ impl WebSocketClientInner { message: Option, writer: SharedMessageWriter, ) -> Option> { - debug!("Started task `heartbeat`"); + tracing::debug!("Started task `heartbeat`"); heartbeat.map(|duration| { task::spawn(async move { let duration = Duration::from_secs(duration); @@ -167,8 +166,8 @@ impl WebSocketClientInner { None => guard.send(Message::Ping(vec![])).await, }; match guard_send_response { - Ok(()) => debug!("Sent ping"), - Err(e) => error!("Error sending ping: {e}"), + Ok(()) => tracing::debug!("Sent ping"), + Err(e) => tracing::error!("Error sending ping: {e}"), } } }) @@ -181,59 +180,59 @@ impl WebSocketClientInner { handler: PyObject, ping_handler: Option, ) -> task::JoinHandle<()> { - debug!("Started task `read`"); + tracing::debug!("Started task `read`"); task::spawn(async move { loop { match 
reader.next().await { Some(Ok(Message::Binary(data))) => { - debug!("Received message "); + tracing::debug!("Received message "); if let Err(e) = Python::with_gil(|py| handler.call1(py, (PyBytes::new(py, &data),))) { - error!("Error calling handler: {e}"); + tracing::error!("Error calling handler: {e}"); break; } continue; } Some(Ok(Message::Text(data))) => { - debug!("Received message: {data}"); + tracing::debug!("Received message: {data}"); if let Err(e) = Python::with_gil(|py| { handler.call1(py, (PyBytes::new(py, data.as_bytes()),)) }) { - error!("Error calling handler: {e}"); + tracing::error!("Error calling handler: {e}"); break; } continue; } Some(Ok(Message::Ping(ping))) => { let payload = String::from_utf8(ping.clone()).expect("Invalid payload"); - debug!("Received ping: {payload}",); + tracing::debug!("Received ping: {payload}",); if let Some(ref handler) = ping_handler { if let Err(e) = Python::with_gil(|py| handler.call1(py, (PyBytes::new(py, &ping),))) { - error!("Error calling handler: {e}"); + tracing::error!("Error calling handler: {e}"); break; } } continue; } Some(Ok(Message::Pong(_))) => { - debug!("Received pong"); + tracing::debug!("Received pong"); } Some(Ok(Message::Close(_))) => { - error!("Received close message - terminating"); + tracing::error!("Received close message - terminating"); break; } Some(Ok(_)) => (), Some(Err(e)) => { - error!("Received error message - terminating: {e}"); + tracing::error!("Received error message - terminating: {e}"); break; } // Internally tungstenite considers the connection closed when polling // for the next message in the stream returns None. None => { - error!("No message received - terminating"); + tracing::error!("No message received - terminating"); break; } } @@ -248,25 +247,25 @@ impl WebSocketClientInner { /// Closing the connection is an async call which cannot be done by the /// drop method so it must be done explicitly. 
pub async fn shutdown(&mut self) { - debug!("Closing connection"); + tracing::debug!("Closing connection"); if !self.read_task.is_finished() { self.read_task.abort(); - debug!("Aborted message read task"); + tracing::debug!("Aborted message read task"); } // Cancel heart beat task if let Some(ref handle) = self.heartbeat_task.take() { if !handle.is_finished() { handle.abort(); - debug!("Aborted heartbeat task"); + tracing::debug!("Aborted heartbeat task"); } } - debug!("Closing writer"); + tracing::debug!("Closing writer"); let mut write_half = self.writer.lock().await; write_half.close().await.unwrap(); - debug!("Closed connection"); + tracing::debug!("Closed connection"); } /// Reconnect with server. @@ -342,7 +341,7 @@ impl WebSocketClient { post_reconnection: Option, post_disconnection: Option, ) -> Result { - debug!("Connecting"); + tracing::debug!("Connecting"); let inner = WebSocketClientInner::connect_url(config).await?; let writer = inner.writer.clone(); let disconnect_mode = Arc::new(AtomicBool::new(false)); @@ -356,8 +355,8 @@ impl WebSocketClient { if let Some(handler) = post_connection { Python::with_gil(|py| match handler.call0(py) { - Ok(_) => debug!("Called `post_connection` handler"), - Err(e) => error!("Error calling `post_connection` handler: {e}"), + Ok(_) => tracing::debug!("Called `post_connection` handler"), + Err(e) => tracing::error!("Error calling `post_connection` handler: {e}"), }); }; @@ -378,7 +377,7 @@ impl WebSocketClient { /// Controller task will periodically check the disconnect mode /// and shutdown the client if it is alive pub async fn disconnect(&self) { - debug!("Disconnecting"); + tracing::debug!("Disconnecting"); self.disconnect_mode.store(true, Ordering::SeqCst); match tokio::time::timeout(Duration::from_secs(5), async { @@ -389,16 +388,16 @@ impl WebSocketClient { .await { Ok(_) => { - debug!("Controller task finished"); + tracing::debug!("Controller task finished"); } Err(_) => { - error!("Timeout waiting for controller 
task to finish"); + tracing::error!("Timeout waiting for controller task to finish"); } } } pub async fn send_bytes(&self, data: Vec) -> Result<(), Error> { - debug!("Sending bytes: {:?}", data); + tracing::debug!("Sending bytes: {:?}", data); let mut guard = self.writer.lock().await; guard.send(Message::Binary(data)).await } @@ -406,8 +405,8 @@ impl WebSocketClient { pub async fn send_close_message(&self) { let mut guard = self.writer.lock().await; match guard.send(Message::Close(None)).await { - Ok(()) => debug!("Sent close message"), - Err(e) => error!("Error sending close message: {e}"), + Ok(()) => tracing::debug!("Sent close message"), + Err(e) => tracing::error!("Error sending close message: {e}"), } } @@ -426,29 +425,33 @@ impl WebSocketClient { match (disconnected, inner.is_alive()) { (false, false) => match inner.reconnect().await { Ok(()) => { - debug!("Reconnected successfully"); + tracing::debug!("Reconnected successfully"); if let Some(ref handler) = post_reconnection { Python::with_gil(|py| match handler.call0(py) { - Ok(_) => debug!("Called `post_reconnection` handler"), + Ok(_) => tracing::debug!("Called `post_reconnection` handler"), Err(e) => { - error!("Error calling `post_reconnection` handler: {e}"); + tracing::error!( + "Error calling `post_reconnection` handler: {e}" + ); } }); } } Err(e) => { - error!("Reconnect failed {e}"); + tracing::error!("Reconnect failed {e}"); break; } }, (true, true) => { - debug!("Shutting down inner client"); + tracing::debug!("Shutting down inner client"); inner.shutdown().await; if let Some(ref handler) = post_disconnection { Python::with_gil(|py| match handler.call0(py) { - Ok(_) => debug!("Called `post_reconnection` handler"), + Ok(_) => tracing::debug!("Called `post_reconnection` handler"), Err(e) => { - error!("Error calling `post_reconnection` handler: {e}"); + tracing::error!( + "Error calling `post_reconnection` handler: {e}" + ); } }); } @@ -519,7 +522,7 @@ impl WebSocketClient { data: Vec, py: 
Python<'py>, ) -> PyResult> { - debug!("Sending bytes {:?}", data); + tracing::debug!("Sending bytes {:?}", data); let writer = slf.writer.clone(); pyo3_asyncio_0_21::tokio::future_into_py(py, async move { let mut guard = writer.lock().await; @@ -541,7 +544,7 @@ impl WebSocketClient { data: String, py: Python<'py>, ) -> PyResult> { - debug!("Sending text: {}", data); + tracing::debug!("Sending text: {}", data); let writer = slf.writer.clone(); pyo3_asyncio_0_21::tokio::future_into_py(py, async move { let mut guard = writer.lock().await; @@ -564,7 +567,7 @@ impl WebSocketClient { py: Python<'py>, ) -> PyResult> { let data_str = String::from_utf8(data.clone()).map_err(to_pyvalue_err)?; - debug!("Sending pong: {}", data_str); + tracing::debug!("Sending pong: {}", data_str); let writer = slf.writer.clone(); pyo3_asyncio_0_21::tokio::future_into_py(py, async move { let mut guard = writer.lock().await; @@ -607,7 +610,6 @@ mod tests { http::HeaderValue, }, }; - use tracing::debug; use tracing_test::traced_test; use crate::websocket::{WebSocketClient, WebSocketConfig}; @@ -668,7 +670,7 @@ mod tests { websocket.send(msg).await.unwrap(); } else if msg.is_close() { if let Err(e) = websocket.close(None).await { - debug!("Connection already closed {e}"); + tracing::debug!("Connection already closed {e}"); }; break; } diff --git a/poetry.lock b/poetry.lock index 598d0084bf3b..cda08a7ba026 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2362,4 +2362,4 @@ ib = ["async-timeout", "defusedxml", "nautilus_ibapi"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "b01b3078c5f50099c522af1755508453c4fd27e89202e69492fc9527ddb6bf83" +content-hash = "1e34ff64907a35e48bd8a2238a323679b11f0b7a10d92ab4e11921284b1dbef9" From b8185ba742074c0505110f458d61a26fd8b10103 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 16:27:18 +1000 Subject: [PATCH 06/60] Remove redundant clippy allows --- nautilus_core/common/src/msgbus/mod.rs | 1 - 
nautilus_core/infrastructure/src/redis/msgbus.rs | 1 - 2 files changed, 2 deletions(-) diff --git a/nautilus_core/common/src/msgbus/mod.rs b/nautilus_core/common/src/msgbus/mod.rs index 5a2948333974..15cded93b0b2 100644 --- a/nautilus_core/common/src/msgbus/mod.rs +++ b/nautilus_core/common/src/msgbus/mod.rs @@ -163,7 +163,6 @@ impl Hash for Subscription { feature = "python", pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.common") )] -#[allow(clippy::type_complexity)] // Complexity will reduce when Cython eliminated pub struct MessageBus { /// The trader ID associated with the message bus. pub trader_id: TraderId, diff --git a/nautilus_core/infrastructure/src/redis/msgbus.rs b/nautilus_core/infrastructure/src/redis/msgbus.rs index 27dad72ce923..f99067083d7e 100644 --- a/nautilus_core/infrastructure/src/redis/msgbus.rs +++ b/nautilus_core/infrastructure/src/redis/msgbus.rs @@ -307,7 +307,6 @@ fn drain_buffer( pipe.query::<()>(conn).map_err(anyhow::Error::from) } -#[allow(clippy::type_complexity)] pub fn stream_messages( tx: tokio::sync::mpsc::Sender, config: DatabaseConfig, From a7195316628a31533a7cf25c1b0d8d874a5d11d5 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 16:27:30 +1000 Subject: [PATCH 07/60] Adjust callout level --- docs/concepts/message_bus.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/concepts/message_bus.md b/docs/concepts/message_bus.md index 029da03e3cc6..1b9becba7622 100644 --- a/docs/concepts/message_bus.md +++ b/docs/concepts/message_bus.md @@ -149,7 +149,7 @@ Indicates whether the producer will write a separate stream for each topic. This useful for Redis backings, which do not support wildcard topics when listening to streams. If set to False, all messages will be written to the same stream. -:::warning +:::info Redis does not support wildcard stream topics. For better compatibility with Redis, it is recommended to set this option to False. 
::: From 192f5b04b29fc85e939bbc1240689e778d504938 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 18:12:41 +1000 Subject: [PATCH 08/60] Update docs --- docs/concepts/message_bus.md | 95 ++++++++++++++++++++++++++++++++---- 1 file changed, 86 insertions(+), 9 deletions(-) diff --git a/docs/concepts/message_bus.md b/docs/concepts/message_bus.md index 1b9becba7622..49a269ce6c10 100644 --- a/docs/concepts/message_bus.md +++ b/docs/concepts/message_bus.md @@ -153,15 +153,6 @@ If set to False, all messages will be written to the same stream. Redis does not support wildcard stream topics. For better compatibility with Redis, it is recommended to set this option to False. ::: -#### External streams - -The external stream keys the node will listen to for publishing deserialized message payloads on the internal message bus. - -:::tip -Set the `LiveDataEngineConfig.external_clients` with the list of `client_id`s intended for external streaming. -The `DataEngine` will filter out subscription commands for these clients, ensuring that the external streaming provides the necessary data for the subscriptions. -::: - ### Types filtering When messages are published on the message bus, they are serialized and written to a stream if a backing @@ -193,3 +184,89 @@ Rather than a maximum lookback window based on the current wall clock time. ::: The minimum supported Redis version is 6.2.0. + +## External streams + +The message bus within a `TradingNode` (node) is referred to as the "internal message bus". +A producer node is one which publishes messages onto an external stream (see [external publishing](#external-publishing)). +The consumer node listens to external streams to receive and publish deserialized message payloads on its internal message bus. 
+ + ┌───────────────────────────┐ + │ │ + │ │ + │ │ + │ Producer Node │ + │ │ + │ │ + │ │ + │ │ + │ │ + │ │ + └─────────────┬─────────────┘ + │ + │ + ┌───────────────────────────────▼──────────────────────────────┐ + │ │ + │ Stream │ + │ │ + └─────────────┬────────────────────────────────────┬───────────┘ + │ │ + │ │ + ┌─────────────▼───────────┐ ┌─────────────▼───────────┐ + │ │ │ │ + │ │ │ │ + │ Consumer Node 1 │ │ Consumer Node 2 │ + │ │ │ │ + │ │ │ │ + │ │ │ │ + │ │ │ │ + │ │ │ │ + │ │ │ │ + │ │ │ │ + └─────────────────────────┘ └─────────────────────────┘ + +:::tip +Set the `LiveDataEngineConfig.external_clients` with the list of `client_id`s intended to represent the external streaming clients. +The `DataEngine` will filter out subscription commands for these clients, ensuring that the external streaming provides the necessary data for any subscriptions to these clients. +::: + +### Example configuration + +The following example details a streaming setup where a producer node publishes Binance data externally, +and a downstream consumer node publishes these data messages onto its internal message bus. + +#### Producer node + +We configure the `MessageBus` of the producer node to publish to a `"binance"` stream. +The settings `use_trader_id`, `use_trader_prefix`, and `use_instance_id` are all set to `False` +to ensure a simple and predictable stream key that the consumer nodes can register for. + +```python + message_bus=MessageBusConfig( + database=DatabaseConfig(timeout=2), + use_trader_id=False, + use_trader_prefix=False, + use_instance_id=False, + streams_prefix="binance", # <--- + stream_per_topic=False, + autotrim_mins=30, + ), +``` + +#### Consumer node + +We configure the `MessageBus` of the consumer node to receive messages from the same `"binance"` stream. +The node will listen to the external stream keys to publish these messages onto its internal message bus. +Additionally, we declare the client ID `"BINANCE_EXT"` as an external client. 
This ensures that the +`DataEngine` does not attempt to send data commands to this client ID, as we expect these messages to be +published onto the internal message bus from the external stream, to which the node has subscribed to the relevant topics. + +```python + data_engine=LiveDataEngineConfig( + external_clients=[ClientId("BINANCE_EXT")], + ), + message_bus=MessageBusConfig( + database=DatabaseConfig(timeout=2), + external_streams=["binance"], # <--- + ), +``` From 2a38ee276e815f47d922bada833ce4ae0b5f9bf8 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 18:25:25 +1000 Subject: [PATCH 09/60] Fix docs build --- nautilus_core/common/src/msgbus/mod.rs | 8 ++++---- nautilus_core/common/src/msgbus/stubs.rs | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/nautilus_core/common/src/msgbus/mod.rs b/nautilus_core/common/src/msgbus/mod.rs index 15cded93b0b2..5bae79b65eba 100644 --- a/nautilus_core/common/src/msgbus/mod.rs +++ b/nautilus_core/common/src/msgbus/mod.rs @@ -269,7 +269,7 @@ impl MessageBus { Ok(()) } - /// Registers a new [`DataClientAdaptor`] + /// Registers a new [`DataClientAdapter`] pub fn register_client(&mut self, client: DataClientAdapter, routing: Option) { if let Some(routing) = routing { self.routing_map.insert(routing, client.client_id()); @@ -280,7 +280,7 @@ impl MessageBus { self.clients.insert(client.client_id, client); } - /// Deregisters a [`DataClientAdaptor`] + /// Deregisters a [`DataClientAdapter`] pub fn deregister_client(&mut self, client_id: &ClientId) { // TODO: We could return a `Result` but then this is part of system wiring and instead of // propagating results all over the place it may be cleaner to just immediately fail @@ -419,7 +419,7 @@ impl MessageBus { /// Data specific functions impl MessageBus { - /// Send a [`DataRequest`] to an endpoint that must be a [`DataClient`] implementation. + /// Send a [`DataRequest`] to an endpoint that must be a data client implementation. 
pub fn send_data_request(&self, message: DataRequest) { // TODO: log error if let Some(client) = self.get_client(&message.client_id, message.venue) { @@ -427,7 +427,7 @@ impl MessageBus { } } - /// Send a [`SubscriptionCommand`] to an endpoint that must be a [`DataClient`] implementation. + /// Send a [`SubscriptionCommand`] to an endpoint that must be a data client implementation. pub fn send_subscription_command(&self, message: SubscriptionCommand) { if let Some(client) = self.get_client(&message.client_id, message.venue) { client.through_execute(message); diff --git a/nautilus_core/common/src/msgbus/stubs.rs b/nautilus_core/common/src/msgbus/stubs.rs index acf44a9dd879..abc0d91a5951 100644 --- a/nautilus_core/common/src/msgbus/stubs.rs +++ b/nautilus_core/common/src/msgbus/stubs.rs @@ -56,6 +56,7 @@ impl MessageHandler for StubMessageHandler { } #[must_use] +#[allow(unused_must_use)] // TODO: Temporary to fix docs build pub fn get_stub_shareable_handler(id: Ustr) -> ShareableMessageHandler { ShareableMessageHandler(Rc::new(StubMessageHandler { id, From 565002b12b00a14f3d848477a1f49dcea352d4a9 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sat, 3 Aug 2024 21:38:14 +1000 Subject: [PATCH 10/60] Update Option Greeks example --- docs/concepts/advanced/custom_data.md | 65 +++++++++++++++++++++------ 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/docs/concepts/advanced/custom_data.md b/docs/concepts/advanced/custom_data.md index 1588f99ca2f8..ed307bad736a 100644 --- a/docs/concepts/advanced/custom_data.md +++ b/docs/concepts/advanced/custom_data.md @@ -104,13 +104,16 @@ def on_data(self, data: Data) -> None: ## Option Greeks example This example demonstrates how to create a custom data type for option Greeks, specifically the delta. -By following these steps, you can create custom data types, subscribe to them, publish them, and store -them in the `Cache` for efficient retrieval. 
+By following these steps, you can create custom data types, subscribe to them, publish them, and store +them in the `Cache` or `ParquetDataCatalog` for efficient retrieval. ```python import msgspec -from nautilus_trader.core.data import Data, DataType +from nautilus_trader.core.data import Data +from nautilus_trader.model.data import DataType from nautilus_trader.serialization.base import register_serializable_type +from nautilus_trader.serialization.arrow.serializer import register_arrow +import pyarrow as pa from nautilus_trader.model.identifiers import InstrumentId from nautilus_trader.core.datetime import dt_to_unix_nanos, unix_nanos_to_dt, format_iso8601 @@ -121,15 +124,19 @@ def unix_nanos_to_str(unix_nanos): class GreeksData(Data): - def __init__(self, instrument_id: InstrumentId, ts_event: int, ts_init: int, delta: float): + def __init__( + self, instrument_id: InstrumentId = InstrumentId.from_str("ES.GLBX"), + ts_event: int = 0, + ts_init: int = 0, + delta: float = 0.0, + ) -> None: self.instrument_id = instrument_id self._ts_event = ts_event self._ts_init = ts_init - self.delta = delta def __repr__(self): - return (f"GreeksData(instrument_id={self.instrument_id}, ts_event={unix_nanos_to_str(self._ts_event)}, ts_init={unix_nanos_to_str(self._ts_init)}, delta={self.delta:.2f})") + return (f"GreeksData(ts_init={unix_nanos_to_str(self._ts_init)}, instrument_id={self.instrument_id}, delta={self.delta:.2f})") @property def ts_event(self): @@ -144,20 +151,37 @@ class GreeksData(Data): "instrument_id": self.instrument_id.value, "ts_event": self._ts_event, "ts_init": self._ts_init, - - "delta": self.delta + "delta": self.delta, } - def to_bytes(self): - return msgspec.msgpack.encode(self.to_dict()) - @classmethod def from_dict(cls, data: dict): return GreeksData(InstrumentId.from_str(data["instrument_id"]), data["ts_event"], data["ts_init"], data["delta"]) + def to_bytes(self): + return msgspec.msgpack.encode(self.to_dict()) + @classmethod def from_bytes(cls, 
data: bytes): return cls.from_dict(msgspec.msgpack.decode(data)) + + def to_catalog(self): + return pa.RecordBatch.from_pylist([self.to_dict()], schema=GreeksData.schema()) + + @classmethod + def from_catalog(cls, table: pa.Table): + return [GreeksData.from_dict(d) for d in table.to_pylist()] + + @classmethod + def schema(cls): + return pa.schema( + { + "instrument_id": pa.string(), + "ts_event": pa.int64(), + "ts_init": pa.int64(), + "delta": pa.float64(), + } + ) ``` ### Publishing and receiving data @@ -178,7 +202,7 @@ def on_data(self, data): print("Data", data) ``` -### Writing and reading data +### Writing and reading data using the cache Here is an example of writing and reading data using the `Cache` from an actor (which includes strategies): @@ -186,9 +210,22 @@ Here is an example of writing and reading data using the `Cache` from an actor ( def greeks_key(instrument_id: InstrumentId): return f"{instrument_id}_GREEKS" -def cache_greeks(self, instrument_id: InstrumentId, greeks_data: GreeksData): - self.cache.add(greeks_key(instrument_id), greeks_data.to_bytes()) +def cache_greeks(self, greeks_data: GreeksData): + self.cache.add(greeks_key(greeks_data.instrument_id), greeks_data.to_bytes()) def greeks_from_cache(self, instrument_id: InstrumentId): return GreeksData.from_bytes(self.cache.get(greeks_key(instrument_id))) ``` + +### Writing and reading data using a catalog + +For streaming custom data to feather files or writing it to parquet files in a catalog (`register_arrow` needs to be used): + +```python +register_arrow(GreeksData, GreeksData.schema(), GreeksData.to_catalog, GreeksData.from_catalog) + +from nautilus_trader.persistence.catalog import ParquetDataCatalog +catalog = ParquetDataCatalog('.') + +catalog.write_data([GreeksData()]) +``` From 1328dbb479b32713574609a300823f4a889d6e73 Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Sun, 4 Aug 2024 00:25:17 +0200 Subject: [PATCH 11/60] Bootstrapping process_order for OrderMatchingEngine in Rust 
(#1824) --- nautilus_core/Cargo.lock | 1 + nautilus_core/backtest/Cargo.toml | 3 +- nautilus_core/backtest/src/matching_engine.rs | 485 +++++++++++++----- nautilus_core/common/src/msgbus/mod.rs | 6 + nautilus_core/common/src/msgbus/stubs.rs | 45 ++ .../tests/test_cache_database_postgres.rs | 2 +- nautilus_core/model/src/events/order/any.rs | 23 + nautilus_core/model/src/instruments/any.rs | 44 ++ .../model/src/instruments/futures_contract.rs | 8 +- nautilus_core/model/src/instruments/mod.rs | 7 + nautilus_core/model/src/instruments/stubs.rs | 24 +- 11 files changed, 506 insertions(+), 142 deletions(-) diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 5c4331560683..e3a4af16f812 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -2598,6 +2598,7 @@ version = "0.28.0" dependencies = [ "anyhow", "cbindgen", + "chrono", "log", "nautilus-common", "nautilus-core", diff --git a/nautilus_core/backtest/Cargo.toml b/nautilus_core/backtest/Cargo.toml index c37e46d4000b..b10b8cf86de7 100644 --- a/nautilus_core/backtest/Cargo.toml +++ b/nautilus_core/backtest/Cargo.toml @@ -14,8 +14,9 @@ crate-type = ["rlib", "staticlib"] nautilus-common = { path = "../common" } nautilus-core = { path = "../core" } nautilus-execution = { path = "../execution" } -nautilus-model = { path = "../model" } +nautilus-model = { path = "../model" , features = ["stubs"]} anyhow = { workspace = true } +chrono = { workspace = true } log = { workspace = true } pyo3 = { workspace = true, optional = true } ustr = { workspace = true } diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index e2955072a039..ec055cb7a003 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -31,14 +31,14 @@ use nautilus_model::{ }, enums::{AccountType, BookType, LiquiditySide, MarketStatus, OmsType}, events::order::{ - OrderAccepted, OrderCancelRejected, OrderCanceled, 
OrderExpired, OrderFilled, - OrderModifyRejected, OrderRejected, OrderTriggered, OrderUpdated, + OrderAccepted, OrderCancelRejected, OrderCanceled, OrderEventAny, OrderExpired, + OrderFilled, OrderModifyRejected, OrderRejected, OrderTriggered, OrderUpdated, }, identifiers::{ AccountId, ClientOrderId, InstrumentId, PositionId, StrategyId, TradeId, TraderId, Venue, VenueOrderId, }, - instruments::any::InstrumentAny, + instruments::{any::InstrumentAny, EXPIRING_INSTRUMENT_TYPES}, orderbook::book::OrderBook, orders::{ any::{OrderAny, PassiveOrderAny, StopOrderAny}, @@ -49,6 +49,7 @@ use nautilus_model::{ }; use ustr::Ustr; +#[derive(Debug, Clone)] pub struct OrderMatchingEngineConfig { pub bar_execution: bool, pub reject_stop_orders: bool, @@ -59,6 +60,21 @@ pub struct OrderMatchingEngineConfig { pub use_reduce_only: bool, } +#[allow(clippy::derivable_impls)] +impl Default for OrderMatchingEngineConfig { + fn default() -> Self { + OrderMatchingEngineConfig { + bar_execution: false, + reject_stop_orders: false, + support_gtd_orders: false, + support_contingent_orders: false, + use_position_ids: false, + use_random_ids: false, + use_reduce_only: false, + } + } +} + /// An order matching engine for a single market. pub struct OrderMatchingEngine { /// The venue for the matching engine. 
@@ -201,6 +217,47 @@ impl OrderMatchingEngine { self.book.apply_delta(delta); } + // -- TRADING COMMANDS ---------------------------------------------------- + pub fn process_order(&mut self, order: &OrderAny, account_id: AccountId) { + if self.core.order_exists(order.client_order_id()) { + self.generate_order_rejected(order, "Order already exists".into()); + return; + } + + // Index identifiers + self.account_ids.insert(order.trader_id(), account_id); + + // Check for instrument expiration or activation + if EXPIRING_INSTRUMENT_TYPES.contains(&self.instrument.instrument_class()) { + if let Some(activation_ns) = self.instrument.activation_ns() { + if self.clock.get_time_ns() < activation_ns { + self.generate_order_rejected( + order, + format!( + "Contract {} is not yet active, activation {}", + self.instrument.id(), + self.instrument.activation_ns().unwrap() + ) + .into(), + ); + } + } + if let Some(expiration_ns) = self.instrument.expiration_ns() { + if self.clock.get_time_ns() >= expiration_ns { + self.generate_order_rejected( + order, + format!( + "Contract {} has expired, expiration {}", + self.instrument.id(), + self.instrument.expiration_ns().unwrap() + ) + .into(), + ); + } + } + } + } + // -- ORDER PROCESSING ---------------------------------------------------- /// Iterate the matching engine by processing the bid and ask order sides @@ -290,19 +347,22 @@ impl OrderMatchingEngine { let account_id = order .account_id() .unwrap_or(self.account_ids.get(&order.trader_id()).unwrap().to_owned()); - let event = OrderRejected::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - account_id, - reason, - UUID4::new(), - ts_now, - ts_now, - false, - ) - .unwrap(); + + let event = OrderEventAny::Rejected( + OrderRejected::new( + order.trader_id(), + order.strategy_id(), + order.instrument_id(), + order.client_order_id(), + account_id, + reason, + UUID4::new(), + ts_now, + ts_now, + false, + ) + .unwrap(), + ); 
self.msgbus.send("ExecEngine.process", &event as &dyn Any); } @@ -311,19 +371,21 @@ impl OrderMatchingEngine { let account_id = order .account_id() .unwrap_or(self.account_ids.get(&order.trader_id()).unwrap().to_owned()); - let event = OrderAccepted::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - venue_order_id, - account_id, - UUID4::new(), - ts_now, - ts_now, - false, - ) - .unwrap(); + let event = OrderEventAny::Accepted( + OrderAccepted::new( + order.trader_id(), + order.strategy_id(), + order.instrument_id(), + order.client_order_id(), + venue_order_id, + account_id, + UUID4::new(), + ts_now, + ts_now, + false, + ) + .unwrap(), + ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } @@ -339,18 +401,21 @@ impl OrderMatchingEngine { reason: Ustr, ) { let ts_now = self.clock.get_time_ns(); - let event = OrderModifyRejected::new( - trader_id, - strategy_id, - instrument_id, - client_order_id, - reason, - UUID4::new(), - ts_now, - ts_now, - false, - Some(venue_order_id), - Some(account_id), + let event = OrderEventAny::ModifyRejected( + OrderModifyRejected::new( + trader_id, + strategy_id, + instrument_id, + client_order_id, + reason, + UUID4::new(), + ts_now, + ts_now, + false, + Some(venue_order_id), + Some(account_id), + ) + .unwrap(), ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } @@ -367,20 +432,22 @@ impl OrderMatchingEngine { reason: Ustr, ) { let ts_now = self.clock.get_time_ns(); - let event = OrderCancelRejected::new( - trader_id, - strategy_id, - instrument_id, - client_order_id, - reason, - UUID4::new(), - ts_now, - ts_now, - false, - Some(venue_order_id), - Some(account_id), - ) - .unwrap(); + let event = OrderEventAny::CancelRejected( + OrderCancelRejected::new( + trader_id, + strategy_id, + instrument_id, + client_order_id, + reason, + UUID4::new(), + ts_now, + ts_now, + false, + Some(venue_order_id), + Some(account_id), + ) + .unwrap(), + ); 
self.msgbus.send("ExecEngine.process", &event as &dyn Any); } @@ -392,76 +459,84 @@ impl OrderMatchingEngine { trigger_price: Price, ) { let ts_now = self.clock.get_time_ns(); - let event = OrderUpdated::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - quantity, - UUID4::new(), - ts_now, - ts_now, - false, - order.venue_order_id(), - order.account_id(), - Some(price), - Some(trigger_price), - ) - .unwrap(); + let event = OrderEventAny::Updated( + OrderUpdated::new( + order.trader_id(), + order.strategy_id(), + order.instrument_id(), + order.client_order_id(), + quantity, + UUID4::new(), + ts_now, + ts_now, + false, + order.venue_order_id(), + order.account_id(), + Some(price), + Some(trigger_price), + ) + .unwrap(), + ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } fn generate_order_canceled(&self, order: &OrderAny, venue_order_id: VenueOrderId) { let ts_now = self.clock.get_time_ns(); - let event = OrderCanceled::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - UUID4::new(), - ts_now, - ts_now, - false, - Some(venue_order_id), - order.account_id(), - ) - .unwrap(); + let event = OrderEventAny::Canceled( + OrderCanceled::new( + order.trader_id(), + order.strategy_id(), + order.instrument_id(), + order.client_order_id(), + UUID4::new(), + ts_now, + ts_now, + false, + Some(venue_order_id), + order.account_id(), + ) + .unwrap(), + ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } fn generate_order_triggered(&self, order: &OrderAny) { let ts_now = self.clock.get_time_ns(); - let event = OrderTriggered::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - UUID4::new(), - ts_now, - ts_now, - false, - order.venue_order_id(), - order.account_id(), - ) - .unwrap(); + let event = OrderEventAny::Triggered( + OrderTriggered::new( + order.trader_id(), + order.strategy_id(), + 
order.instrument_id(), + order.client_order_id(), + UUID4::new(), + ts_now, + ts_now, + false, + order.venue_order_id(), + order.account_id(), + ) + .unwrap(), + ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } fn generate_order_expired(&self, order: &OrderAny) { let ts_now = self.clock.get_time_ns(); - let event = OrderExpired::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - UUID4::new(), - ts_now, - ts_now, - false, - order.venue_order_id(), - order.account_id(), - ) - .unwrap(); + let event = OrderEventAny::Expired( + OrderExpired::new( + order.trader_id(), + order.strategy_id(), + order.instrument_id(), + order.client_order_id(), + UUID4::new(), + ts_now, + ts_now, + false, + order.venue_order_id(), + order.account_id(), + ) + .unwrap(), + ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } @@ -481,27 +556,177 @@ impl OrderMatchingEngine { let account_id = order .account_id() .unwrap_or(self.account_ids.get(&order.trader_id()).unwrap().to_owned()); - let event = OrderFilled::new( - order.trader_id(), - order.strategy_id(), - order.instrument_id(), - order.client_order_id(), - venue_order_id, - account_id, - self.generate_trade_id(), - order.order_side(), - order.order_type(), - last_qty, - last_px, - quote_currency, - liquidity_side, - UUID4::new(), - ts_now, - ts_now, - false, - Some(venue_position_id), - Some(commission), + let event = OrderEventAny::Filled( + OrderFilled::new( + order.trader_id(), + order.strategy_id(), + order.instrument_id(), + order.client_order_id(), + venue_order_id, + account_id, + self.generate_trade_id(), + order.order_side(), + order.order_type(), + last_qty, + last_px, + quote_currency, + liquidity_side, + UUID4::new(), + ts_now, + ts_now, + false, + Some(venue_position_id), + Some(commission), + ) + .unwrap(), ); self.msgbus.send("ExecEngine.process", &event as &dyn Any); } } + 
+//////////////////////////////////////////////////////////////////////////////// +// Tests +//////////////////////////////////////////////////////////////////////////////// +#[cfg(test)] +mod tests { + use std::{rc::Rc, sync::LazyLock}; + + use chrono::{TimeZone, Utc}; + use nautilus_common::{ + cache::Cache, + msgbus::{ + stubs::{get_message_saving_handler, MessageSavingHandler}, + MessageBus, + }, + }; + use nautilus_core::{nanos::UnixNanos, time::AtomicTime}; + use nautilus_model::{ + enums::{AccountType, BookType, OmsType, OrderSide}, + events::order::{OrderEventAny, OrderEventType}, + identifiers::AccountId, + instruments::{any::InstrumentAny, stubs::futures_contract_es}, + orders::stubs::TestOrderStubs, + types::quantity::Quantity, + }; + use rstest::rstest; + use ustr::Ustr; + + use crate::matching_engine::{OrderMatchingEngine, OrderMatchingEngineConfig}; + + static ATOMIC_TIME: LazyLock = + LazyLock::new(|| AtomicTime::new(true, UnixNanos::default())); + + fn get_order_matching_engine( + instrument: InstrumentAny, + msgbus: Rc, + ) -> OrderMatchingEngine { + let cache = Rc::new(Cache::default()); + let config = OrderMatchingEngineConfig::default(); + OrderMatchingEngine::new( + instrument, + 1, + BookType::L1_MBP, + OmsType::Netting, + AccountType::Cash, + &ATOMIC_TIME, + msgbus, + cache, + config, + ) + } + + #[rstest] + fn test_order_matching_engine_instrument_already_expired() { + let account_id = AccountId::from("SIM-001"); + let time = AtomicTime::new(true, UnixNanos::default()); + let mut msgbus = MessageBus::default(); + let instrument = InstrumentAny::FuturesContract(futures_contract_es(None, None)); + + // Register saving message handler to exec engine endpoint + let exec_engine_endpoint = "ExecEngine.process"; + let msg_handler = + get_message_saving_handler::(Ustr::from(exec_engine_endpoint)); + msgbus.register(exec_engine_endpoint, msg_handler.clone()); + + // Create engine and process order + let mut engine = 
get_order_matching_engine(instrument.clone(), Rc::new(msgbus)); + let order = TestOrderStubs::market_order( + instrument.id(), + OrderSide::Buy, + Quantity::from("1"), + None, + None, + ); + engine.process_order(&order, account_id); + + // Get messages and test + let saved_messages = msg_handler + .0 + .as_ref() + .as_any() + .downcast_ref::>() + .unwrap() + .get_messages(); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from("Contract ESZ1.GLBX has expired, expiration 1625702400000000000") + ); + } + + #[rstest] + fn test_order_matching_engine_instrument_not_active() { + let account_id = AccountId::from("SIM-001"); + let time = AtomicTime::new(true, UnixNanos::default()); + let mut msgbus = MessageBus::default(); + let activation = UnixNanos::from( + Utc.with_ymd_and_hms(2222, 4, 8, 0, 0, 0) + .unwrap() + .timestamp_nanos_opt() + .unwrap() as u64, + ); + let expiration = UnixNanos::from( + Utc.with_ymd_and_hms(2223, 7, 8, 0, 0, 0) + .unwrap() + .timestamp_nanos_opt() + .unwrap() as u64, + ); + let instrument = + InstrumentAny::FuturesContract(futures_contract_es(Some(activation), Some(expiration))); + + // Register saving message handler to exec engine endpoint + let exec_engine_endpoint = "ExecEngine.process"; + let msg_handler = + get_message_saving_handler::(Ustr::from(exec_engine_endpoint)); + msgbus.register(exec_engine_endpoint, msg_handler.clone()); + + // Create engine and process order + let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus)); + let order = TestOrderStubs::market_order( + instrument.id(), + OrderSide::Buy, + Quantity::from("1"), + None, + None, + ); + engine.process_order(&order, account_id); + + // Get messages and test + let saved_messages = msg_handler + .0 + .as_ref() + .as_any() + .downcast_ref::>() + .unwrap() + .get_messages(); + 
assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from("Contract ESZ1.GLBX is not yet active, activation 7960723200000000000") + ); + } +} diff --git a/nautilus_core/common/src/msgbus/mod.rs b/nautilus_core/common/src/msgbus/mod.rs index 5bae79b65eba..3462e07f8874 100644 --- a/nautilus_core/common/src/msgbus/mod.rs +++ b/nautilus_core/common/src/msgbus/mod.rs @@ -484,6 +484,12 @@ pub fn is_matching(topic: &Ustr, pattern: &Ustr) -> bool { table[n][m] } +impl Default for MessageBus { + fn default() -> Self { + Self::new(TraderId::from("TRADER-001"), UUID4::new(), None, None) + } +} + //////////////////////////////////////////////////////////////////////////////// // Tests //////////////////////////////////////////////////////////////////////////////// diff --git a/nautilus_core/common/src/msgbus/stubs.rs b/nautilus_core/common/src/msgbus/stubs.rs index abc0d91a5951..90ee7cc24176 100644 --- a/nautilus_core/common/src/msgbus/stubs.rs +++ b/nautilus_core/common/src/msgbus/stubs.rs @@ -15,6 +15,7 @@ use std::{ any::Any, + cell::RefCell, rc::Rc, sync::{ atomic::{AtomicBool, Ordering}, @@ -104,3 +105,47 @@ pub fn get_call_check_shareable_handler(id: Ustr) -> ShareableMessageHandler { called: Arc::new(AtomicBool::new(false)), })) } + +// Handler which saves the messages it receives +#[derive(Debug, Clone)] +pub struct MessageSavingHandler { + id: Ustr, + messages: Rc>>, +} + +impl MessageSavingHandler { + #[must_use] + pub fn get_messages(&self) -> Vec { + self.messages.borrow().clone() + } +} + +impl MessageHandler for MessageSavingHandler { + fn id(&self) -> Ustr { + self.id + } + + fn handle(&self, message: &dyn Any) { + let mut messages = self.messages.borrow_mut(); + match message.downcast_ref::() { + Some(m) => messages.push(m.clone()), + None => panic!("MessageSavingHandler: message type mismatch"), + 
} + } + + fn handle_response(&self, _resp: DataResponse) {} + + fn handle_data(&self, _resp: &Data) {} + + fn as_any(&self) -> &dyn Any { + self + } +} + +#[must_use] +pub fn get_message_saving_handler(id: Ustr) -> ShareableMessageHandler { + ShareableMessageHandler(Rc::new(MessageSavingHandler:: { + id, + messages: Rc::new(RefCell::new(Vec::new())), + })) +} diff --git a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs index 5250f55e7cc7..a41b05087c73 100644 --- a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs @@ -90,7 +90,7 @@ mod serial_tests { let crypto_perpetual = crypto_perpetual_ethusdt(); let currency_pair = currency_pair_ethusdt(); let equity = equity_aapl(); - let futures_contract = futures_contract_es(); + let futures_contract = futures_contract_es(None, None); let options_contract = options_contract_appl(); // Insert all the instruments pg_cache diff --git a/nautilus_core/model/src/events/order/any.rs b/nautilus_core/model/src/events/order/any.rs index 098fb49c0470..310ac96a7608 100644 --- a/nautilus_core/model/src/events/order/any.rs +++ b/nautilus_core/model/src/events/order/any.rs @@ -16,6 +16,7 @@ use nautilus_core::nanos::UnixNanos; use serde::{Deserialize, Serialize}; use strum::Display; +use ustr::Ustr; use super::OrderEventType; use crate::{ @@ -142,6 +143,28 @@ impl OrderEventAny { Self::Filled(event) => event.ts_event, } } + + pub fn message(&self) -> Option { + match self { + Self::Initialized(_) => None, + Self::Denied(event) => Some(event.reason), + Self::Emulated(_) => None, + Self::Released(_) => None, + Self::Submitted(_) => None, + Self::Accepted(_) => None, + Self::Rejected(event) => Some(event.reason), + Self::Canceled(_) => None, + Self::Expired(_) => None, + Self::Triggered(_) => None, + Self::PendingUpdate(_) => None, + Self::PendingCancel(_) => None, + 
Self::ModifyRejected(event) => Some(event.reason), + Self::CancelRejected(event) => Some(event.reason), + Self::Updated(_) => None, + Self::PartiallyFilled(_) => None, + Self::Filled(_) => None, + } + } } impl From for OrderFilled { diff --git a/nautilus_core/model/src/instruments/any.rs b/nautilus_core/model/src/instruments/any.rs index 72160ead5337..9e2632f1bb01 100644 --- a/nautilus_core/model/src/instruments/any.rs +++ b/nautilus_core/model/src/instruments/any.rs @@ -13,6 +13,7 @@ // limitations under the License. // ------------------------------------------------------------------------------------------------- +use nautilus_core::nanos::UnixNanos; use rust_decimal::Decimal; use super::{ @@ -21,6 +22,7 @@ use super::{ options_contract::OptionsContract, options_spread::OptionsSpread, Instrument, }; use crate::{ + enums::InstrumentClass, identifiers::InstrumentId, types::{currency::Currency, money::Money, price::Price, quantity::Quantity}, }; @@ -193,6 +195,48 @@ impl InstrumentAny { } } + #[must_use] + pub fn instrument_class(&self) -> InstrumentClass { + match self { + Self::CryptoFuture(inst) => inst.instrument_class(), + Self::CryptoPerpetual(inst) => inst.instrument_class(), + Self::CurrencyPair(inst) => inst.instrument_class(), + Self::Equity(inst) => inst.instrument_class(), + Self::FuturesContract(inst) => inst.instrument_class(), + Self::FuturesSpread(inst) => inst.instrument_class(), + Self::OptionsContract(inst) => inst.instrument_class(), + Self::OptionsSpread(inst) => inst.instrument_class(), + } + } + + #[must_use] + pub fn activation_ns(&self) -> Option { + match self { + Self::CryptoFuture(inst) => inst.activation_ns(), + Self::CryptoPerpetual(inst) => inst.activation_ns(), + Self::CurrencyPair(inst) => inst.activation_ns(), + Self::Equity(inst) => inst.activation_ns(), + Self::FuturesContract(inst) => inst.activation_ns(), + Self::FuturesSpread(inst) => inst.activation_ns(), + Self::OptionsContract(inst) => inst.activation_ns(), + 
Self::OptionsSpread(inst) => inst.activation_ns(), + } + } + + #[must_use] + pub fn expiration_ns(&self) -> Option { + match self { + Self::CryptoFuture(inst) => inst.expiration_ns(), + Self::CryptoPerpetual(inst) => inst.expiration_ns(), + Self::CurrencyPair(inst) => inst.expiration_ns(), + Self::Equity(inst) => inst.expiration_ns(), + Self::FuturesContract(inst) => inst.expiration_ns(), + Self::FuturesSpread(inst) => inst.expiration_ns(), + Self::OptionsContract(inst) => inst.expiration_ns(), + Self::OptionsSpread(inst) => inst.expiration_ns(), + } + } + pub fn make_price(&self, value: f64) -> anyhow::Result { match self { Self::CryptoFuture(inst) => inst.make_price(value), diff --git a/nautilus_core/model/src/instruments/futures_contract.rs b/nautilus_core/model/src/instruments/futures_contract.rs index 3d87b254134a..efb5a0ef01be 100644 --- a/nautilus_core/model/src/instruments/futures_contract.rs +++ b/nautilus_core/model/src/instruments/futures_contract.rs @@ -269,11 +269,11 @@ impl Instrument for FuturesContract { mod tests { use rstest::rstest; - use crate::instruments::{futures_contract::FuturesContract, stubs::*}; + use crate::instruments::stubs::*; #[rstest] - fn test_equality(futures_contract_es: FuturesContract) { - let cloned = futures_contract_es; - assert_eq!(futures_contract_es, cloned); + fn test_equality() { + let futures_contract = futures_contract_es(None, None); + assert_eq!(futures_contract, futures_contract.clone()); } } diff --git a/nautilus_core/model/src/instruments/mod.rs b/nautilus_core/model/src/instruments/mod.rs index 59883f917d5a..0b33b9363d6f 100644 --- a/nautilus_core/model/src/instruments/mod.rs +++ b/nautilus_core/model/src/instruments/mod.rs @@ -143,3 +143,10 @@ pub trait Instrument: 'static + Send { Quantity::new(value, self.size_precision()).unwrap() // TODO: Handle error properly } } + +pub const EXPIRING_INSTRUMENT_TYPES: [InstrumentClass; 4] = [ + InstrumentClass::Future, + InstrumentClass::FutureSpread, + 
InstrumentClass::Option, + InstrumentClass::OptionSpread, +]; diff --git a/nautilus_core/model/src/instruments/stubs.rs b/nautilus_core/model/src/instruments/stubs.rs index 4ed17215b293..241f5417dae9 100644 --- a/nautilus_core/model/src/instruments/stubs.rs +++ b/nautilus_core/model/src/instruments/stubs.rs @@ -323,18 +323,30 @@ pub fn equity_aapl() -> Equity { // FuturesContract //////////////////////////////////////////////////////////////////////////////// -#[fixture] -pub fn futures_contract_es() -> FuturesContract { - let activation = Utc.with_ymd_and_hms(2021, 4, 8, 0, 0, 0).unwrap(); - let expiration = Utc.with_ymd_and_hms(2021, 7, 8, 0, 0, 0).unwrap(); +pub fn futures_contract_es( + activation: Option, + expiration: Option, +) -> FuturesContract { + let activation = activation.unwrap_or(UnixNanos::from( + Utc.with_ymd_and_hms(2021, 4, 8, 0, 0, 0) + .unwrap() + .timestamp_nanos_opt() + .unwrap() as u64, + )); + let expiration = expiration.unwrap_or(UnixNanos::from( + Utc.with_ymd_and_hms(2021, 7, 8, 0, 0, 0) + .unwrap() + .timestamp_nanos_opt() + .unwrap() as u64, + )); FuturesContract::new( InstrumentId::from("ESZ1.GLBX"), Symbol::from("ESZ1"), AssetClass::Index, Some(Ustr::from("XCME")), Ustr::from("ES"), - UnixNanos::from(activation.timestamp_nanos_opt().unwrap() as u64), - UnixNanos::from(expiration.timestamp_nanos_opt().unwrap() as u64), + activation, + expiration, Currency::USD(), 2, Price::from("0.01"), From dff0b0145abf4bf649484ea1a45db00bf8eb4ddb Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 07:14:53 +1000 Subject: [PATCH 12/60] Update dependencies --- nautilus_core/Cargo.lock | 40 ++++++++++------------------------------ poetry.lock | 18 +++++++++--------- 2 files changed, 19 insertions(+), 39 deletions(-) diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index e3a4af16f812..2f62a41bc801 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -39,7 +39,7 @@ dependencies = [ "getrandom", 
"once_cell", "version_check", - "zerocopy 0.7.35", + "zerocopy", ] [[package]] @@ -1709,9 +1709,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920" dependencies = [ "crc32fast", "miniz_oxide", @@ -3407,11 +3407,11 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee4364d9f3b902ef14fab8a1ddffb783a1cb6b4bba3bfc1fa3922732c7de97f" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" dependencies = [ - "zerocopy 0.6.6", + "zerocopy", ] [[package]] @@ -4055,9 +4055,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.2" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" dependencies = [ "base64", "rustls-pki-types", @@ -5727,34 +5727,14 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" -[[package]] -name = "zerocopy" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854e949ac82d619ee9a14c66a1b674ac730422372ccb759ce0c39cabcf2bf8e6" -dependencies = [ - "byteorder", - "zerocopy-derive 0.6.6", -] - [[package]] name = "zerocopy" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ - "zerocopy-derive 0.7.35", -] - -[[package]] 
-name = "zerocopy-derive" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "125139de3f6b9d625c39e2efdd73d41bdac468ccd556556440e322be0e1bbd91" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.72", + "byteorder", + "zerocopy-derive", ] [[package]] diff --git a/poetry.lock b/poetry.lock index cda08a7ba026..37d961a98cba 100644 --- a/poetry.lock +++ b/poetry.lock @@ -146,22 +146,22 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"}, + {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
"pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" From fe999cb3bba33c9b720d08658f5189433ed4805e Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 10:51:44 +1000 Subject: [PATCH 13/60] Continue DataEngine implementation in Rust --- nautilus_core/Cargo.lock | 1 + nautilus_core/backtest/Cargo.toml | 1 + nautilus_core/backtest/src/data_client.rs | 231 ++++++++++++ nautilus_core/backtest/src/exchange.rs | 14 + nautilus_core/backtest/src/lib.rs | 2 + nautilus_core/backtest/src/matching_engine.rs | 58 ++- nautilus_core/common/src/cache/mod.rs | 9 +- nautilus_core/common/src/lib.rs | 1 - nautilus_core/common/src/messages/data.rs | 21 ++ nautilus_core/common/src/msgbus/handler.rs | 42 +++ nautilus_core/common/src/msgbus/mod.rs | 120 ++---- nautilus_core/common/src/msgbus/stubs.rs | 8 +- .../common/src/msgbus/switchboard.rs | 36 ++ nautilus_core/common/src/python/handler.rs | 4 +- nautilus_core/common/src/python/msgbus.rs | 2 +- nautilus_core/{common => data}/src/client.rs | 49 ++- nautilus_core/data/src/engine/mod.rs | 353 +++++++++++------- nautilus_core/data/src/engine/runner.rs | 51 ++- nautilus_core/data/src/lib.rs | 2 + nautilus_core/data/src/mocks.rs | 248 ++++++++++++ .../tests/test_cache_postgres.rs | 5 +- 21 files changed, 967 insertions(+), 291 deletions(-) create mode 100644 nautilus_core/backtest/src/data_client.rs create mode 100644 nautilus_core/backtest/src/exchange.rs create mode 100644 nautilus_core/common/src/msgbus/handler.rs create mode 100644 nautilus_core/common/src/msgbus/switchboard.rs rename nautilus_core/{common => data}/src/client.rs (95%) create mode 100644 nautilus_core/data/src/mocks.rs diff --git 
a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 2f62a41bc801..9013fb5f31fd 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -2602,6 +2602,7 @@ dependencies = [ "log", "nautilus-common", "nautilus-core", + "nautilus-data", "nautilus-execution", "nautilus-model", "pyo3", diff --git a/nautilus_core/backtest/Cargo.toml b/nautilus_core/backtest/Cargo.toml index b10b8cf86de7..d0dac5f3f6fa 100644 --- a/nautilus_core/backtest/Cargo.toml +++ b/nautilus_core/backtest/Cargo.toml @@ -13,6 +13,7 @@ crate-type = ["rlib", "staticlib"] [dependencies] nautilus-common = { path = "../common" } nautilus-core = { path = "../core" } +nautilus-data = { path = "../data" } nautilus-execution = { path = "../execution" } nautilus-model = { path = "../model" , features = ["stubs"]} anyhow = { workspace = true } diff --git a/nautilus_core/backtest/src/data_client.rs b/nautilus_core/backtest/src/data_client.rs new file mode 100644 index 000000000000..26bce3ef29d1 --- /dev/null +++ b/nautilus_core/backtest/src/data_client.rs @@ -0,0 +1,231 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ------------------------------------------------------------------------------------------------- + +// Under development +#![allow(dead_code)] +#![allow(unused_variables)] + +use std::{cell::RefCell, rc::Rc}; + +use nautilus_common::{ + cache::Cache, + messages::data::{DataRequest, Payload}, + msgbus::MessageBus, +}; +use nautilus_core::{nanos::UnixNanos, uuid::UUID4}; +use nautilus_data::client::DataClient; +use nautilus_model::{ + data::{ + bar::{Bar, BarType}, + quote::QuoteTick, + trade::TradeTick, + DataType, + }, + enums::BookType, + identifiers::{ClientId, InstrumentId, Venue}, + instruments::any::InstrumentAny, +}; + +pub struct BacktestDataClient { + cache: Rc>, + msgbus: Rc>, + pub client_id: ClientId, + pub venue: Venue, +} + +impl DataClient for BacktestDataClient { + fn client_id(&self) -> ClientId { + self.client_id + } + fn venue(&self) -> Option { + Some(self.venue) + } + + fn start(&self) {} + fn stop(&self) {} + fn reset(&self) {} + fn dispose(&self) {} + fn is_connected(&self) -> bool { + true + } + fn is_disconnected(&self) -> bool { + false + } + + // -- COMMAND HANDLERS --------------------------------------------------------------------------- + + /// Parse command and call specific function + fn subscribe(&mut self, _data_type: DataType) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instruments(&mut self, _venue: Option) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instrument(&mut self, _instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_order_book_deltas( + &mut self, + _instrument_id: InstrumentId, + _book_type: BookType, + _depth: Option, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_order_book_snapshots( + &mut self, + instrument_id: InstrumentId, + book_type: BookType, + depth: Option, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_quote_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_trade_ticks(&mut self, 
instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_bars(&mut self, bar_type: BarType) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instrument_status(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instrument_close(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe(&mut self, data_type: DataType) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instruments(&mut self, venue: Option) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instrument(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_order_book_deltas(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_order_book_snapshots( + &mut self, + instrument_id: InstrumentId, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_quote_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_trade_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_bars(&mut self, bar_type: BarType) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instrument_status(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instrument_close(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + // -- DATA REQUEST HANDLERS --------------------------------------------------------------------------- + + fn request_data(&self, request: DataRequest) { + todo!() + } + + fn request_instruments( + &self, + correlation_id: UUID4, + venue: Venue, + start: Option, + end: Option, + ) -> Vec { + todo!() + } + + fn request_instrument( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + start: Option, + end: Option, + ) -> InstrumentAny { + todo!() + } + + // TODO: figure out where to call this and it's return type + fn 
request_order_book_snapshot( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + depth: Option, + ) -> Payload { + todo!() + } + + fn request_quote_ticks( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + start: Option, + end: Option, + limit: Option, + ) -> Vec { + todo!() + } + + fn request_trade_ticks( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + start: Option, + end: Option, + limit: Option, + ) -> Vec { + todo!() + } + + fn request_bars( + &self, + correlation_id: UUID4, + bar_type: BarType, + start: Option, + end: Option, + limit: Option, + ) -> Vec { + todo!() + } +} diff --git a/nautilus_core/backtest/src/exchange.rs b/nautilus_core/backtest/src/exchange.rs new file mode 100644 index 000000000000..97d459d8d1e8 --- /dev/null +++ b/nautilus_core/backtest/src/exchange.rs @@ -0,0 +1,14 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ------------------------------------------------------------------------------------------------- diff --git a/nautilus_core/backtest/src/lib.rs b/nautilus_core/backtest/src/lib.rs index 67b6560fe7b1..f47310d049e4 100644 --- a/nautilus_core/backtest/src/lib.rs +++ b/nautilus_core/backtest/src/lib.rs @@ -27,6 +27,8 @@ //! 
- `ffi`: Enables the C foreign function interface (FFI) from `cbindgen` //! - `python`: Enables Python bindings from `pyo3` +pub mod data_client; pub mod engine; +pub mod exchange; pub mod matching_engine; pub mod models; diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index ec055cb7a003..d5a9b4e8a234 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -329,15 +329,12 @@ impl OrderMatchingEngine { fn generate_trade_id(&mut self) -> TradeId { self.execution_count += 1; - TradeId::new(self.generate_trade_id_str().as_str()).unwrap() - } - - fn generate_trade_id_str(&self) -> Ustr { - if self.config.use_random_ids { - UUID4::new().to_string().into() + let trade_id = if self.config.use_random_ids { + UUID4::new().to_string() } else { - format!("{}-{}-{}", self.venue, self.raw_id, self.execution_count).into() - } + format!("{}-{}-{}", self.venue, self.raw_id, self.execution_count) + }; + TradeId::from(trade_id.as_str()) } // -- EVENT GENERATORS ----------------------------------------------------- @@ -363,7 +360,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } fn generate_order_accepted(&self, order: &OrderAny, venue_order_id: VenueOrderId) { @@ -386,7 +386,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } #[allow(clippy::too_many_arguments)] @@ -417,7 +420,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } #[allow(clippy::too_many_arguments)] @@ -448,7 +454,10 @@ impl 
OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } fn generate_order_updated( @@ -477,7 +486,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } fn generate_order_canceled(&self, order: &OrderAny, venue_order_id: VenueOrderId) { @@ -497,7 +509,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } fn generate_order_triggered(&self, order: &OrderAny) { @@ -517,7 +532,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } fn generate_order_expired(&self, order: &OrderAny) { @@ -537,7 +555,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } #[allow(clippy::too_many_arguments)] @@ -580,7 +601,10 @@ impl OrderMatchingEngine { ) .unwrap(), ); - self.msgbus.send("ExecEngine.process", &event as &dyn Any); + self.msgbus.send( + &self.msgbus.switchboard.exec_engine_process, + &event as &dyn Any, + ); } } diff --git a/nautilus_core/common/src/cache/mod.rs b/nautilus_core/common/src/cache/mod.rs index d1e36c9378ab..0f5e762153b1 100644 --- a/nautilus_core/common/src/cache/mod.rs +++ b/nautilus_core/common/src/cache/mod.rs @@ -224,14 +224,17 @@ pub struct Cache { impl Default for Cache { /// Creates a new default [`Cache`] instance. 
fn default() -> Self { - Self::new(CacheConfig::default(), None) + Self::new(Some(CacheConfig::default()), None) } } impl Cache { /// Creates a new [`Cache`] instance. #[must_use] - pub fn new(config: CacheConfig, database: Option>) -> Self { + pub fn new( + config: Option, + database: Option>, + ) -> Self { let index = CacheIndex { venue_account: HashMap::new(), venue_orders: HashMap::new(), @@ -264,7 +267,7 @@ impl Cache { }; Self { - config, + config: config.unwrap_or_default(), index, database, general: HashMap::new(), diff --git a/nautilus_core/common/src/lib.rs b/nautilus_core/common/src/lib.rs index f9aec2e8bb28..da9d2ddbdb5f 100644 --- a/nautilus_core/common/src/lib.rs +++ b/nautilus_core/common/src/lib.rs @@ -47,6 +47,5 @@ pub mod xrate; #[cfg(feature = "ffi")] pub mod ffi; -pub mod client; #[cfg(feature = "python")] pub mod python; diff --git a/nautilus_core/common/src/messages/data.rs b/nautilus_core/common/src/messages/data.rs index 1efdc0e64551..951049fc4cab 100644 --- a/nautilus_core/common/src/messages/data.rs +++ b/nautilus_core/common/src/messages/data.rs @@ -67,6 +67,7 @@ pub enum Action { Unsubscribe, } +#[derive(Debug, Clone)] pub struct SubscriptionCommand { pub client_id: ClientId, pub venue: Venue, @@ -76,6 +77,26 @@ pub struct SubscriptionCommand { pub ts_init: UnixNanos, } +impl SubscriptionCommand { + pub fn new( + client_id: ClientId, + venue: Venue, + data_type: DataType, + action: Action, + command_id: UUID4, + ts_init: UnixNanos, + ) -> Self { + Self { + client_id, + venue, + data_type, + action, + command_id, + ts_init, + } + } +} + pub enum DataEngineRequest { Request(DataRequest), SubscriptionCommand(SubscriptionCommand), diff --git a/nautilus_core/common/src/msgbus/handler.rs b/nautilus_core/common/src/msgbus/handler.rs new file mode 100644 index 000000000000..d90547d393ad --- /dev/null +++ b/nautilus_core/common/src/msgbus/handler.rs @@ -0,0 +1,42 @@ +// 
------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ------------------------------------------------------------------------------------------------- + +use std::{any::Any, rc::Rc}; + +use nautilus_model::data::Data; +use ustr::Ustr; + +use crate::messages::data::DataResponse; + +pub trait MessageHandler: Any { + fn id(&self) -> Ustr; + fn handle(&self, message: &dyn Any); + fn handle_response(&self, resp: DataResponse); + fn handle_data(&self, data: Data); + fn as_any(&self) -> &dyn Any; +} + +#[derive(Clone)] +#[repr(transparent)] +pub struct ShareableMessageHandler(pub Rc); + +impl From> for ShareableMessageHandler { + fn from(value: Rc) -> Self { + Self(value) + } +} + +// Message handlers are not expected to be sent across thread boundaries +unsafe impl Send for ShareableMessageHandler {} diff --git a/nautilus_core/common/src/msgbus/mod.rs b/nautilus_core/common/src/msgbus/mod.rs index 3462e07f8874..c64fb80747ff 100644 --- a/nautilus_core/common/src/msgbus/mod.rs +++ b/nautilus_core/common/src/msgbus/mod.rs @@ -16,52 +16,28 @@ //! A common in-memory `MessageBus` for loosely coupled message passing patterns. 
pub mod database; +pub mod handler; pub mod stubs; +pub mod switchboard; use std::{ any::Any, collections::HashMap, fmt::Debug, hash::{Hash, Hasher}, - rc::Rc, }; +use handler::ShareableMessageHandler; use indexmap::IndexMap; use nautilus_core::uuid::UUID4; -use nautilus_model::{ - data::Data, - identifiers::{ClientId, TraderId, Venue}, -}; +use nautilus_model::{data::Data, identifiers::TraderId}; +use switchboard::MessagingSwitchboard; use ustr::Ustr; -use crate::{ - client::DataClientAdapter, - messages::data::{DataRequest, DataResponse, SubscriptionCommand}, -}; +use crate::messages::data::DataResponse; pub const CLOSE_TOPIC: &str = "CLOSE"; -pub trait MessageHandler: Any { - fn id(&self) -> Ustr; - fn handle(&self, message: &dyn Any); - fn handle_response(&self, resp: DataResponse); - fn handle_data(&self, resp: &Data); - fn as_any(&self) -> &dyn Any; -} - -#[derive(Clone)] -#[repr(transparent)] -pub struct ShareableMessageHandler(pub Rc); - -impl From> for ShareableMessageHandler { - fn from(value: Rc) -> Self { - Self(value) - } -} - -// Message handlers are not expected to be sent across thread boundaries -unsafe impl Send for ShareableMessageHandler {} - // Represents a subscription to a particular topic. // // This is an internal class intended to be used by the message bus to organize @@ -172,6 +148,8 @@ pub struct MessageBus { pub name: String, /// If the message bus is backed by a database. pub has_backing: bool, + /// The switchboard for built-in endpoints. + pub switchboard: MessagingSwitchboard, /// Mapping from topic to the corresponding handler /// a topic can be a string with wildcards /// * '?' - any character @@ -182,9 +160,6 @@ pub struct MessageBus { patterns: IndexMap>, /// Handles a message or a request destined for a specific endpoint. 
endpoints: IndexMap, - /// Handles data and subscriptions requests for a specific data client - pub clients: IndexMap, - routing_map: HashMap, } /// Message bus is not meant to be passed between threads @@ -203,11 +178,10 @@ impl MessageBus { trader_id, instance_id, name: name.unwrap_or(stringify!(MessageBus).to_owned()), + switchboard: MessagingSwitchboard::default(), subscriptions: IndexMap::new(), patterns: IndexMap::new(), endpoints: IndexMap::new(), - clients: IndexMap::new(), - routing_map: HashMap::new(), has_backing: false, } } @@ -269,38 +243,6 @@ impl MessageBus { Ok(()) } - /// Registers a new [`DataClientAdapter`] - pub fn register_client(&mut self, client: DataClientAdapter, routing: Option) { - if let Some(routing) = routing { - self.routing_map.insert(routing, client.client_id()); - log::info!("Set client {} routing for {routing}", client.client_id()); - } - - log::info!("Registered client {}", client.client_id()); - self.clients.insert(client.client_id, client); - } - - /// Deregisters a [`DataClientAdapter`] - pub fn deregister_client(&mut self, client_id: &ClientId) { - // TODO: We could return a `Result` but then this is part of system wiring and instead of - // propagating results all over the place it may be cleaner to just immediately fail - // for these sorts of design-time errors? - // correctness::check_key_in_map(&client_id, &self.clients, "client_id", "clients").unwrap(); - - self.clients.shift_remove(client_id); - log::info!("Deregistered client {client_id}"); - } - - fn get_client(&self, client_id: &ClientId, venue: Venue) -> Option<&DataClientAdapter> { - match self.clients.get(client_id) { - Some(client) => Some(client), - None => self - .routing_map - .get(&venue) - .and_then(|client_id: &ClientId| self.clients.get(client_id)), - } - } - /// Registers the given `handler` for the `endpoint` address. 
pub fn register(&mut self, endpoint: &str, handler: ShareableMessageHandler) { // Updates value if key already exists @@ -400,8 +342,8 @@ impl MessageBus { } /// Sends a message to an endpoint. - pub fn send(&self, endpoint: &str, message: &dyn Any) { - if let Some(handler) = self.get_endpoint(&Ustr::from(endpoint)) { + pub fn send(&self, endpoint: &Ustr, message: &dyn Any) { + if let Some(handler) = self.get_endpoint(endpoint) { handler.0.handle(message); } } @@ -417,22 +359,22 @@ impl MessageBus { } } -/// Data specific functions +/// Data specific functions. impl MessageBus { - /// Send a [`DataRequest`] to an endpoint that must be a data client implementation. - pub fn send_data_request(&self, message: DataRequest) { - // TODO: log error - if let Some(client) = self.get_client(&message.client_id, message.venue) { - let _ = client.request(message); - } - } - - /// Send a [`SubscriptionCommand`] to an endpoint that must be a data client implementation. - pub fn send_subscription_command(&self, message: SubscriptionCommand) { - if let Some(client) = self.get_client(&message.client_id, message.venue) { - client.through_execute(message); - } - } + // /// Send a [`DataRequest`] to an endpoint that must be a data client implementation. + // pub fn send_data_request(&self, message: DataRequest) { + // // TODO: log error + // if let Some(client) = self.get_client(&message.client_id, message.venue) { + // let _ = client.request(message); + // } + // } + // + // /// Send a [`SubscriptionCommand`] to an endpoint that must be a data client implementation. + // pub fn send_subscription_command(&self, message: SubscriptionCommand) { + // if let Some(client) = self.get_client(&message.client_id, message.venue) { + // client.through_execute(message); + // } + // } /// Send a [`DataResponse`] to an endpoint that must be an actor. 
pub fn send_response(&self, message: DataResponse) { @@ -447,7 +389,7 @@ impl MessageBus { let matching_subs = self.matching_subscriptions(&topic); for sub in matching_subs { - sub.handler.0.handle_data(&message); + sub.handler.0.handle_data(message.clone()); } } } @@ -566,13 +508,13 @@ mod tests { #[rstest] fn test_endpoint_send() { let mut msgbus = stub_msgbus(); - let endpoint = "MyEndpoint"; + let endpoint = Ustr::from("MyEndpoint"); let handler_id = Ustr::from("1"); let handler = get_call_check_shareable_handler(handler_id); - msgbus.register(endpoint, handler.clone()); - assert!(msgbus.get_endpoint(&Ustr::from(endpoint)).is_some()); + msgbus.register(endpoint.as_str(), handler.clone()); + assert!(msgbus.get_endpoint(&endpoint).is_some()); // check if the handler called variable is false assert!(!handler @@ -584,7 +526,7 @@ mod tests { .was_called()); // Send a message to the endpoint - msgbus.send(endpoint, &"Test Message"); + msgbus.send(&endpoint, &"Test Message"); // Check if the handler was called assert!(handler diff --git a/nautilus_core/common/src/msgbus/stubs.rs b/nautilus_core/common/src/msgbus/stubs.rs index 90ee7cc24176..fee86d36a0dd 100644 --- a/nautilus_core/common/src/msgbus/stubs.rs +++ b/nautilus_core/common/src/msgbus/stubs.rs @@ -29,7 +29,7 @@ use ustr::Ustr; use crate::{ messages::data::DataResponse, - msgbus::{MessageHandler, ShareableMessageHandler}, + msgbus::{handler::MessageHandler, ShareableMessageHandler}, }; // Stub message handler which logs the data it receives @@ -49,7 +49,7 @@ impl MessageHandler for StubMessageHandler { fn handle_response(&self, _resp: DataResponse) {} - fn handle_data(&self, _resp: &Data) {} + fn handle_data(&self, _resp: Data) {} fn as_any(&self) -> &dyn Any { self @@ -91,7 +91,7 @@ impl MessageHandler for CallCheckMessageHandler { fn handle_response(&self, _resp: DataResponse) {} - fn handle_data(&self, _resp: &Data) {} + fn handle_data(&self, _resp: Data) {} fn as_any(&self) -> &dyn Any { self @@ -135,7 
+135,7 @@ impl MessageHandler for MessageSavingHandler { fn handle_response(&self, _resp: DataResponse) {} - fn handle_data(&self, _resp: &Data) {} + fn handle_data(&self, _resp: Data) {} fn as_any(&self) -> &dyn Any { self diff --git a/nautilus_core/common/src/msgbus/switchboard.rs b/nautilus_core/common/src/msgbus/switchboard.rs new file mode 100644 index 000000000000..f883344a72e7 --- /dev/null +++ b/nautilus_core/common/src/msgbus/switchboard.rs @@ -0,0 +1,36 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ------------------------------------------------------------------------------------------------- + +use ustr::Ustr; + +/// Represents a switchboard of built-in messaging endpoint names. 
+#[derive(Clone, Debug)] +pub struct MessagingSwitchboard { + pub data_engine_execute: Ustr, + pub data_engine_process: Ustr, + pub exec_engine_execute: Ustr, + pub exec_engine_process: Ustr, +} + +impl Default for MessagingSwitchboard { + fn default() -> Self { + Self { + data_engine_execute: Ustr::from("DataEngine.execute"), + data_engine_process: Ustr::from("DataEngine.process"), + exec_engine_execute: Ustr::from("ExecEngine.execute"), + exec_engine_process: Ustr::from("ExecEngine.process"), + } + } +} diff --git a/nautilus_core/common/src/python/handler.rs b/nautilus_core/common/src/python/handler.rs index fd5bb9f0868d..b328ea7918dc 100644 --- a/nautilus_core/common/src/python/handler.rs +++ b/nautilus_core/common/src/python/handler.rs @@ -19,7 +19,7 @@ use nautilus_model::data::Data; use pyo3::prelude::*; use ustr::Ustr; -use crate::{messages::data::DataResponse, msgbus::MessageHandler}; +use crate::{messages::data::DataResponse, msgbus::handler::MessageHandler}; #[derive(Clone)] #[cfg_attr( @@ -66,7 +66,7 @@ impl MessageHandler for PythonMessageHandler { } } - fn handle_data(&self, resp: &Data) { + fn handle_data(&self, data: Data) { let py_event = (); let result = pyo3::Python::with_gil(|py| self.handler.call_method1(py, "handle", (py_event,))); diff --git a/nautilus_core/common/src/python/msgbus.rs b/nautilus_core/common/src/python/msgbus.rs index 62943b4a8633..e1d0fa7eadb4 100644 --- a/nautilus_core/common/src/python/msgbus.rs +++ b/nautilus_core/common/src/python/msgbus.rs @@ -19,7 +19,7 @@ use pyo3::{pymethods, PyObject, PyRef, PyRefMut}; use ustr::Ustr; use super::handler::PythonMessageHandler; -use crate::msgbus::{database::BusMessage, MessageBus, ShareableMessageHandler}; +use crate::msgbus::{database::BusMessage, handler::ShareableMessageHandler, MessageBus}; #[pymethods] impl BusMessage { diff --git a/nautilus_core/common/src/client.rs b/nautilus_core/data/src/client.rs similarity index 95% rename from nautilus_core/common/src/client.rs rename to 
nautilus_core/data/src/client.rs index c05c8629d610..efbfb4841fa8 100644 --- a/nautilus_core/common/src/client.rs +++ b/nautilus_core/data/src/client.rs @@ -26,26 +26,24 @@ use std::{ }; use indexmap::IndexMap; +use nautilus_common::{ + clock::Clock, + messages::data::{Action, DataRequest, DataResponse, Payload, SubscriptionCommand}, +}; use nautilus_core::{nanos::UnixNanos, uuid::UUID4}; use nautilus_model::{ data::{ bar::{Bar, BarType}, quote::QuoteTick, trade::TradeTick, - Data, DataType, + DataType, }, enums::BookType, identifiers::{ClientId, InstrumentId, Venue}, instruments::any::InstrumentAny, }; -use crate::{ - clock::Clock, - messages::data::{Action, DataRequest, DataResponse, Payload, SubscriptionCommand}, -}; - -pub trait LiveDataClient { - // -- GETTERS --------------------------------------------------------------------------- +pub trait DataClient { fn client_id(&self) -> ClientId; fn venue(&self) -> Option; fn start(&self); @@ -55,9 +53,10 @@ pub trait LiveDataClient { fn is_connected(&self) -> bool; fn is_disconnected(&self) -> bool; + // TODO: Move to separate trait // A [`LiveDataClient`] must have two channels to send back data and data responses - fn get_response_data_channel(&self) -> tokio::sync::mpsc::UnboundedSender; - fn get_subscriber_data_channel(&self) -> tokio::sync::mpsc::UnboundedSender; + // fn get_response_data_channel(&self) -> tokio::sync::mpsc::UnboundedSender; + // fn get_subscriber_data_channel(&self) -> tokio::sync::mpsc::UnboundedSender; // -- COMMAND HANDLERS --------------------------------------------------------------------------- @@ -147,10 +146,10 @@ pub trait LiveDataClient { } pub struct DataClientAdapter { + client: Box, + clock: Box, pub client_id: ClientId, pub venue: Venue, - client: Box, - clock: Box, pub subscriptions_generic: HashSet, pub subscriptions_order_book_delta: HashSet, pub subscriptions_order_book_snapshot: HashSet, @@ -164,7 +163,7 @@ pub struct DataClientAdapter { } impl Deref for DataClientAdapter 
{ - type Target = Box; + type Target = Box; fn deref(&self) -> &Self::Target { &self.client @@ -178,6 +177,30 @@ impl DerefMut for DataClientAdapter { } impl DataClientAdapter { + pub fn new( + client_id: ClientId, + venue: Venue, + client: Box, + clock: Box, + ) -> Self { + Self { + client, + clock, + client_id, + venue, + subscriptions_generic: HashSet::new(), + subscriptions_order_book_delta: HashSet::new(), + subscriptions_order_book_snapshot: HashSet::new(), + subscriptions_quote_tick: HashSet::new(), + subscriptions_trade_tick: HashSet::new(), + subscriptions_bar: HashSet::new(), + subscriptions_instrument_status: HashSet::new(), + subscriptions_instrument_close: HashSet::new(), + subscriptions_instrument: HashSet::new(), + subscriptions_instrument_venue: HashSet::new(), + } + } + /// TODO: Decide whether to use mut references for subscription commands pub fn through_execute(&self, command: SubscriptionCommand) {} diff --git a/nautilus_core/data/src/engine/mod.rs b/nautilus_core/data/src/engine/mod.rs index 00dc8096284a..31db4e85e710 100644 --- a/nautilus_core/data/src/engine/mod.rs +++ b/nautilus_core/data/src/engine/mod.rs @@ -25,21 +25,18 @@ use std::{ any::Any, cell::RefCell, collections::{HashMap, HashSet}, - marker::PhantomData, ops::Deref, rc::Rc, sync::Arc, }; +use indexmap::IndexMap; use nautilus_common::{ cache::Cache, - client::DataClientAdapter, clock::Clock, - component::{Disposed, PreInitialized, Ready, Running, Starting, State, Stopped, Stopping}, - enums::ComponentState, logging::{RECV, RES}, - messages::data::DataResponse, - msgbus::MessageBus, + messages::data::{DataRequest, DataResponse, SubscriptionCommand}, + msgbus::{handler::MessageHandler, MessageBus}, }; use nautilus_model::{ data::{ @@ -51,11 +48,12 @@ use nautilus_model::{ trade::TradeTick, Data, DataType, }, - identifiers::{ClientId, InstrumentId}, + identifiers::{ClientId, InstrumentId, Venue}, instruments::{any::InstrumentAny, synthetic::SyntheticInstrument}, }; +use ustr::Ustr; 
-use crate::aggregation::BarAggregator; +use crate::{aggregation::BarAggregator, client::DataClientAdapter}; pub struct DataEngineConfig { pub time_bars_build_with_no_updates: bool, @@ -63,16 +61,31 @@ pub struct DataEngineConfig { pub time_bars_interval_type: String, // Make this an enum `BarIntervalType` pub validate_data_sequence: bool, pub buffer_deltas: bool, - pub external_clients: Vec, + pub external_clients: Option>, pub debug: bool, } -pub struct DataEngine { - state: PhantomData, +impl Default for DataEngineConfig { + fn default() -> Self { + Self { + time_bars_build_with_no_updates: true, + time_bars_timestamp_on_close: true, + time_bars_interval_type: "left_open".to_string(), // Make this an enum `BarIntervalType` + validate_data_sequence: false, + buffer_deltas: false, + external_clients: None, + debug: false, + } + } +} + +pub struct DataEngine { clock: Box, cache: Rc>, msgbus: Rc>, + clients: IndexMap, default_client: Option, + routing_map: IndexMap, // order_book_intervals: HashMap<(InstrumentId, usize), Vec>, // TODO bar_aggregators: Vec>, // TODO: dyn for now synthetic_quote_feeds: HashMap>, @@ -87,65 +100,78 @@ impl DataEngine { clock: Box, cache: Rc>, msgbus: Rc>, - config: DataEngineConfig, + config: Option, ) -> Self { Self { - state: PhantomData::, clock, cache, + msgbus, + clients: IndexMap::new(), + routing_map: IndexMap::new(), default_client: None, bar_aggregators: Vec::new(), synthetic_quote_feeds: HashMap::new(), synthetic_trade_feeds: HashMap::new(), buffered_deltas_map: HashMap::new(), - config, - msgbus, + config: config.unwrap_or_default(), } } } -impl DataEngine { - fn transition(self) -> DataEngine { - DataEngine { - state: PhantomData, - clock: self.clock, - cache: self.cache, - default_client: self.default_client, - bar_aggregators: self.bar_aggregators, - synthetic_quote_feeds: self.synthetic_quote_feeds, - synthetic_trade_feeds: self.synthetic_trade_feeds, - buffered_deltas_map: self.buffered_deltas_map, - config: self.config, 
- msgbus: self.msgbus, - } +impl DataEngine { + // pub fn register_catalog(&mut self, catalog: ParquetDataCatalog) {} TODO: Implement catalog + + /// Register the given data `client` with the engine as the default routing client. + /// + /// When a specific venue routing cannot be found, this client will receive messages. + /// + /// # Warnings + /// + /// Any existing default routing client will be overwritten. + /// TODO: change this to suit message bus behaviour + pub fn register_default_client(&mut self, client: DataClientAdapter) { + log::info!("Registered default client {}", client.client_id()); + self.default_client = Some(client); } - #[must_use] - pub fn state(&self) -> ComponentState { - S::state() + pub fn start(self) { + self.clients.values().for_each(|client| client.start()); + } + + pub fn stop(self) { + self.clients.values().for_each(|client| client.stop()); + } + + pub fn reset(self) { + self.clients.values().for_each(|client| client.reset()); + } + + pub fn dispose(mut self) { + self.clients.values().for_each(|client| client.dispose()); + self.clock.cancel_timers(); + } + + pub fn connect(&self) { + todo!() // Implement actual client connections for a live/sandbox context + } + + pub fn disconnect(&self) { + todo!() // Implement actual client connections for a live/sandbox context } #[must_use] pub fn check_connected(&self) -> bool { - self.msgbus - .borrow() - .clients - .values() - .all(|client| client.is_connected()) + self.clients.values().all(|client| client.is_connected()) } #[must_use] pub fn check_disconnected(&self) -> bool { - self.msgbus - .borrow() - .clients - .values() - .all(|client| !client.is_connected()) + self.clients.values().all(|client| !client.is_connected()) } #[must_use] pub fn registed_clients(&self) -> Vec { - self.msgbus.borrow().clients.keys().copied().collect() + self.clients.keys().copied().collect() } // -- SUBSCRIPTIONS --------------------------------------------------------------------------- @@ -156,7 +182,7 @@ 
impl DataEngine { T: Clone, { let mut subs = Vec::new(); - for client in self.msgbus.borrow().clients.values() { + for client in self.clients.values() { subs.extend(get_subs(client).iter().cloned()); } subs @@ -206,91 +232,80 @@ impl DataEngine { pub fn subscribed_instrument_close(&self) -> Vec { self.collect_subscriptions(|client| &client.subscriptions_instrument_close) } -} - -impl DataEngine { - // pub fn register_catalog(&mut self, catalog: ParquetDataCatalog) {} TODO: Implement catalog - /// Register the given data `client` with the engine as the default routing client. - /// - /// When a specific venue routing cannot be found, this client will receive messages. - /// - /// # Warnings - /// - /// Any existing default routing client will be overwritten. - /// TODO: change this to suit message bus behaviour - pub fn register_default_client(&mut self, client: DataClientAdapter) { - log::info!("Registered default client {}", client.client_id()); - self.default_client = Some(client); + pub fn on_start(self) { + todo!() } - fn initialize(self) -> DataEngine { - self.transition() + pub fn on_stop(self) { + todo!() } -} -impl DataEngine { - #[must_use] - pub fn start(self) -> DataEngine { - self.msgbus - .borrow() - .clients - .values() - .for_each(|client| client.start()); - self.transition() - } + /// Registers a new [`DataClientAdapter`] + pub fn register_client(&mut self, client: DataClientAdapter, routing: Option) { + if let Some(routing) = routing { + self.routing_map.insert(routing, client.client_id()); + log::info!("Set client {} routing for {routing}", client.client_id()); + } - #[must_use] - pub fn stop(self) -> DataEngine { - self.msgbus - .borrow() - .clients - .values() - .for_each(|client| client.stop()); - self.transition() + log::info!("Registered client {}", client.client_id()); + self.clients.insert(client.client_id, client); } - #[must_use] - pub fn reset(self) -> Self { - self.msgbus - .borrow() - .clients - .values() - .for_each(|client| 
client.reset()); - self.transition() - } + /// Deregisters a [`DataClientAdapter`] + pub fn deregister_client(&mut self, client_id: &ClientId) { + // TODO: We could return a `Result` but then this is part of system wiring and instead of + // propagating results all over the place it may be cleaner to just immediately fail + // for these sorts of design-time errors? + // correctness::check_key_in_map(&client_id, &self.clients, "client_id", "clients").unwrap(); - #[must_use] - pub fn dispose(mut self) -> DataEngine { - self.msgbus - .borrow() - .clients - .values() - .for_each(|client| client.dispose()); - self.clock.cancel_timers(); - self.transition() + self.clients.shift_remove(client_id); + log::info!("Deregistered client {client_id}"); } -} -impl DataEngine { - #[must_use] - pub fn on_start(self) -> DataEngine { - self.transition() + fn get_client(&self, client_id: &ClientId, venue: &Venue) -> Option<&DataClientAdapter> { + match self.clients.get(client_id) { + Some(client) => Some(client), + None => self + .routing_map + .get(venue) + .and_then(|client_id: &ClientId| self.clients.get(client_id)), + } } -} -impl DataEngine { - pub fn connect(&self) { - todo!() // Implement actual client connections for a live/sandbox context + /// Send a [`DataRequest`] to an endpoint that must be a data client implementation. 
+ pub fn execute(&mut self, msg: &dyn Any) { + // TODO: log error + if let Some(cmd) = msg.downcast_ref::() { + if let Some(client) = self.clients.get_mut(&cmd.client_id) { + client.execute(cmd.clone()) + } else { + log::error!( + "Cannot handle command: no client found for {}", + cmd.client_id + ); + } + } } - pub fn disconnect(&self) { - todo!() // Implement actual client connections for a live/sandbox context + pub fn request(&self, req: DataRequest) { + if let Some(client) = self.clients.get(&req.client_id) { + // TODO: We don't immediately need the response + let _ = client.request(req); + } else { + log::error!( + "Cannot handle request: no client found for {}", + req.client_id + ); + } } - #[must_use] - pub fn stop(self) -> DataEngine { - self.transition() + /// TODO: Probably not required + /// Send a [`SubscriptionCommand`] to an endpoint that must be a data client implementation. + pub fn send_subscription_command(&self, message: SubscriptionCommand) { + if let Some(client) = self.get_client(&message.client_id, &message.venue) { + client.through_execute(message); + } } pub fn process(&self, data: Data) { @@ -497,25 +512,6 @@ impl DataEngine { } } -impl DataEngine { - #[must_use] - pub fn on_stop(self) -> DataEngine { - self.transition() - } -} - -impl DataEngine { - #[must_use] - pub fn reset(self) -> DataEngine { - self.transition() - } - - #[must_use] - pub fn dispose(self) -> DataEngine { - self.transition() - } -} - // TODO: Potentially move these pub fn get_instrument_publish_topic(instrument: &InstrumentAny) -> String { let instrument_id = instrument.id(); @@ -563,3 +559,98 @@ pub fn get_trade_publish_topic(trade: &TradeTick) -> String { pub fn get_bar_publish_topic(bar: &Bar) -> String { format!("data.bars.{}", bar.bar_type) } + +pub struct SubscriptionCommandHandler { + id: Ustr, + data_engine: Rc>, +} + +impl MessageHandler for SubscriptionCommandHandler { + fn id(&self) -> Ustr { + self.id + } + + fn handle(&self, message: &dyn Any) { + 
self.data_engine.borrow_mut().execute(message) + } + fn handle_response(&self, _resp: DataResponse) {} + fn handle_data(&self, _resp: Data) {} + fn as_any(&self) -> &dyn Any { + self + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Tests +//////////////////////////////////////////////////////////////////////////////// +#[cfg(test)] +mod tests { + use indexmap::indexmap; + use nautilus_common::{ + clock::TestClock, messages::data::Action, msgbus::handler::ShareableMessageHandler, + }; + use nautilus_core::{nanos::UnixNanos, uuid::UUID4}; + use nautilus_model::{ + identifiers::TraderId, + instruments::{currency_pair::CurrencyPair, stubs::audusd_sim}, + }; + use rstest::rstest; + + use super::*; + use crate::mocks::MockDataClient; + + #[rstest] + fn test_execute_subscribe_instruments(audusd_sim: CurrencyPair) { + // TODO: Cleanup test and provide more stubs + let trader_id = TraderId::from("TESTER-001"); + let clock = Box::new(TestClock::new()); + let cache = Rc::new(RefCell::new(Cache::default())); + let msgbus = Rc::new(RefCell::new(MessageBus::new( + trader_id, + UUID4::new(), + None, + None, + ))); + let switchboard = msgbus.borrow().switchboard.clone(); + let data_engine = DataEngine::new(clock, cache.clone(), msgbus.clone(), None); + let data_engine = Rc::new(RefCell::new(data_engine)); + + let client_id = ClientId::from("SIM"); + let venue = Venue::from("SIM"); + let client = Box::new(MockDataClient::new( + cache.clone(), + msgbus.clone(), + client_id, + venue, + )); + + let client = DataClientAdapter::new(client_id, venue, client, Box::new(TestClock::new())); + data_engine.borrow_mut().register_client(client, None); + + let metadata = indexmap! 
{ + "instrument_id".to_string() => audusd_sim.id.to_string(), + }; + let data_type = DataType::new(stringify!(QuoteTick), Some(metadata)); + let cmd = SubscriptionCommand::new( + client_id, + venue, + data_type, + Action::Subscribe, + UUID4::new(), + UnixNanos::default(), + ); + + let endpoint = switchboard.data_engine_execute; + let handler = ShareableMessageHandler(Rc::new(SubscriptionCommandHandler { + id: switchboard.data_engine_process, + data_engine: data_engine.clone(), + })); + msgbus.borrow_mut().register(endpoint.as_str(), handler); + msgbus.borrow().send(&endpoint, &cmd as &dyn Any); + + assert!(data_engine + .borrow() + .subscribed_quote_ticks() + .contains(&audusd_sim.id)); + } +} diff --git a/nautilus_core/data/src/engine/runner.rs b/nautilus_core/data/src/engine/runner.rs index 83dab85e347c..eb3b9da9b806 100644 --- a/nautilus_core/data/src/engine/runner.rs +++ b/nautilus_core/data/src/engine/runner.rs @@ -15,10 +15,7 @@ use std::{cell::RefCell, collections::VecDeque, rc::Rc}; -use nautilus_common::{ - component::Running, - messages::data::{DataClientResponse, DataResponse}, -}; +use nautilus_common::messages::data::{DataClientResponse, DataResponse}; use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender}; use super::DataEngine; @@ -27,7 +24,7 @@ pub trait Runner { type Sender; fn new() -> Self; - fn run(&mut self, engine: &DataEngine); + fn run(&mut self, engine: &DataEngine); fn get_sender(&self) -> Self::Sender; } @@ -35,21 +32,23 @@ pub trait SendResponse { fn send(&self, resp: DataResponse); } -pub struct LiveRunner { - resp_tx: UnboundedSender, - resp_rx: UnboundedReceiver, +pub type DataResponseQueue = Rc>>; + +pub struct BacktestRunner { + queue: DataResponseQueue, } -impl Runner for LiveRunner { - type Sender = UnboundedSender; +impl Runner for BacktestRunner { + type Sender = DataResponseQueue; fn new() -> Self { - let (resp_tx, resp_rx) = tokio::sync::mpsc::unbounded_channel::(); - Self { resp_tx, resp_rx } + Self { + queue: 
Rc::new(RefCell::new(VecDeque::new())), + } } - fn run(&mut self, engine: &DataEngine) { - while let Some(resp) = self.resp_rx.blocking_recv() { + fn run(&mut self, engine: &DataEngine) { + while let Some(resp) = self.queue.as_ref().borrow_mut().pop_front() { match resp { DataClientResponse::Response(resp) => engine.response(resp), DataClientResponse::Data(data) => engine.process(data), @@ -58,27 +57,25 @@ impl Runner for LiveRunner { } fn get_sender(&self) -> Self::Sender { - self.resp_tx.clone() + self.queue.clone() } } -pub type DataResponseQueue = Rc>>; - -pub struct BacktestRunner { - queue: DataResponseQueue, +pub struct LiveRunner { + resp_tx: UnboundedSender, + resp_rx: UnboundedReceiver, } -impl Runner for BacktestRunner { - type Sender = DataResponseQueue; +impl Runner for LiveRunner { + type Sender = UnboundedSender; fn new() -> Self { - Self { - queue: Rc::new(RefCell::new(VecDeque::new())), - } + let (resp_tx, resp_rx) = tokio::sync::mpsc::unbounded_channel::(); + Self { resp_tx, resp_rx } } - fn run(&mut self, engine: &DataEngine) { - while let Some(resp) = self.queue.as_ref().borrow_mut().pop_front() { + fn run(&mut self, engine: &DataEngine) { + while let Some(resp) = self.resp_rx.blocking_recv() { match resp { DataClientResponse::Response(resp) => engine.response(resp), DataClientResponse::Data(data) => engine.process(data), @@ -87,6 +84,6 @@ impl Runner for BacktestRunner { } fn get_sender(&self) -> Self::Sender { - self.queue.clone() + self.resp_tx.clone() } } diff --git a/nautilus_core/data/src/lib.rs b/nautilus_core/data/src/lib.rs index a472a1de3193..5498f37004c7 100644 --- a/nautilus_core/data/src/lib.rs +++ b/nautilus_core/data/src/lib.rs @@ -28,4 +28,6 @@ //! 
- `python`: Enables Python bindings from `pyo3` pub mod aggregation; +pub mod client; pub mod engine; +pub mod mocks; diff --git a/nautilus_core/data/src/mocks.rs b/nautilus_core/data/src/mocks.rs new file mode 100644 index 000000000000..eccf472bdb54 --- /dev/null +++ b/nautilus_core/data/src/mocks.rs @@ -0,0 +1,248 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ------------------------------------------------------------------------------------------------- + +// Under development +#![allow(dead_code)] +#![allow(unused_variables)] + +use std::{cell::RefCell, rc::Rc}; + +use nautilus_common::{ + cache::Cache, + messages::data::{DataRequest, Payload}, + msgbus::MessageBus, +}; +use nautilus_core::{nanos::UnixNanos, uuid::UUID4}; +use nautilus_model::{ + data::{ + bar::{Bar, BarType}, + quote::QuoteTick, + trade::TradeTick, + DataType, + }, + enums::BookType, + identifiers::{ClientId, InstrumentId, Venue}, + instruments::any::InstrumentAny, +}; + +use crate::client::DataClient; + +pub struct MockDataClient { + cache: Rc>, + msgbus: Rc>, + pub client_id: ClientId, + pub venue: Venue, +} + +impl MockDataClient { + pub fn new( + cache: Rc>, + msgbus: Rc>, + client_id: ClientId, + venue: Venue, + ) -> Self { + Self { + cache, + msgbus, + client_id, + venue, + } + } +} + +impl DataClient for MockDataClient { + fn client_id(&self) -> ClientId { + self.client_id + } + fn venue(&self) -> Option { + Some(self.venue) + } + + fn start(&self) {} + fn stop(&self) {} + fn reset(&self) {} + fn dispose(&self) {} + fn is_connected(&self) -> bool { + true + } + fn is_disconnected(&self) -> bool { + false + } + + // -- COMMAND HANDLERS --------------------------------------------------------------------------- + + /// Parse command and call specific function + fn subscribe(&mut self, _data_type: DataType) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instruments(&mut self, _venue: Option) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instrument(&mut self, _instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_order_book_deltas( + &mut self, + _instrument_id: InstrumentId, + _book_type: BookType, + _depth: Option, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_order_book_snapshots( + &mut self, + instrument_id: InstrumentId, + book_type: BookType, + depth: Option, + ) -> 
anyhow::Result<()> { + Ok(()) + } + + fn subscribe_quote_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_trade_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_bars(&mut self, bar_type: BarType) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instrument_status(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn subscribe_instrument_close(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe(&mut self, data_type: DataType) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instruments(&mut self, venue: Option) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instrument(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_order_book_deltas(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_order_book_snapshots( + &mut self, + instrument_id: InstrumentId, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_quote_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_trade_ticks(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_bars(&mut self, bar_type: BarType) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instrument_status(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + fn unsubscribe_instrument_close(&mut self, instrument_id: InstrumentId) -> anyhow::Result<()> { + Ok(()) + } + + // -- DATA REQUEST HANDLERS --------------------------------------------------------------------------- + + fn request_data(&self, request: DataRequest) { + todo!() + } + + fn request_instruments( + &self, + correlation_id: UUID4, + venue: Venue, + start: Option, + end: Option, + ) -> Vec { + todo!() + } + + fn request_instrument( + &self, + correlation_id: UUID4, + 
instrument_id: InstrumentId, + start: Option, + end: Option, + ) -> InstrumentAny { + todo!() + } + + // TODO: figure out where to call this and it's return type + fn request_order_book_snapshot( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + depth: Option, + ) -> Payload { + todo!() + } + + fn request_quote_ticks( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + start: Option, + end: Option, + limit: Option, + ) -> Vec { + todo!() + } + + fn request_trade_ticks( + &self, + correlation_id: UUID4, + instrument_id: InstrumentId, + start: Option, + end: Option, + limit: Option, + ) -> Vec { + todo!() + } + + fn request_bars( + &self, + correlation_id: UUID4, + bar_type: BarType, + start: Option, + end: Option, + limit: Option, + ) -> Vec { + todo!() + } +} diff --git a/nautilus_core/infrastructure/tests/test_cache_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_postgres.rs index 09835f68824b..1ad7dd5fc9fc 100644 --- a/nautilus_core/infrastructure/tests/test_cache_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_postgres.rs @@ -13,12 +13,11 @@ // limitations under the License. 
// ------------------------------------------------------------------------------------------------- -use nautilus_common::cache::{database::CacheDatabaseAdapter, Cache, CacheConfig}; +use nautilus_common::cache::{database::CacheDatabaseAdapter, Cache}; #[must_use] pub fn get_cache(cache_database: Option>) -> Cache { - let cache_config = CacheConfig::default(); - Cache::new(cache_config, cache_database) + Cache::new(None, cache_database) } #[cfg(test)] From 031de7a3c04b924aa48ee2565eb6c1aa79043265 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 13:41:17 +1000 Subject: [PATCH 14/60] Fix typo --- nautilus_trader/persistence/wranglers_v2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautilus_trader/persistence/wranglers_v2.py b/nautilus_trader/persistence/wranglers_v2.py index 85e1fab23e46..7649e5fa0b39 100644 --- a/nautilus_trader/persistence/wranglers_v2.py +++ b/nautilus_trader/persistence/wranglers_v2.py @@ -475,7 +475,7 @@ def from_pandas( df["open"] = (df["open"] * 1e9).astype(pd.Int64Dtype()) df["high"] = (df["high"] * 1e9).astype(pd.Int64Dtype()) df["low"] = (df["low"] * 1e9).astype(pd.Int64Dtype()) - df["clow"] = (df["close"] * 1e9).astype(pd.Int64Dtype()) + df["close"] = (df["close"] * 1e9).astype(pd.Int64Dtype()) if "volume" not in df.columns: df["volume"] = pd.Series([default_volume * 1e9] * len(df), dtype=pd.UInt64Dtype()) From ea4ad25bd05c9367abb504336c9136cfa75e09e9 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 14:44:48 +1000 Subject: [PATCH 15/60] Add customdata decorator --- RELEASES.md | 2 +- nautilus_trader/model/custom.py | 123 +++++++++++++++++++++ tests/unit_tests/model/test_custom_data.py | 53 +++++++++ 3 files changed, 177 insertions(+), 1 deletion(-) create mode 100644 nautilus_trader/model/custom.py create mode 100644 tests/unit_tests/model/test_custom_data.py diff --git a/RELEASES.md b/RELEASES.md index 3bb7214dd075..928d1b17fc19 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -3,7 
+3,7 @@ Released on TBD (UTC). ### Enhancements -None +- Added `@customdataclass` decorator to reduce need for boilerplate implementing custom data types, thanks @faysou ### Breaking Changes None diff --git a/nautilus_trader/model/custom.py b/nautilus_trader/model/custom.py new file mode 100644 index 000000000000..9afcdb499809 --- /dev/null +++ b/nautilus_trader/model/custom.py @@ -0,0 +1,123 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +from typing import Any + +import msgspec +import pyarrow as pa + +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.serialization.arrow.serializer import register_arrow +from nautilus_trader.serialization.base import register_serializable_type + + +def customdataclass(cls): # noqa: C901 (too complex) + if cls.__init__ is object.__init__: + + def __init__(self, ts_event: int, ts_init: int, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + self._ts_event = ts_event + self._ts_init = ts_init + + cls.__init__ = __init__ + + @property + def ts_event(self) -> int: + return self._ts_event + + cls.ts_event = ts_event + + @property + def ts_init(self) -> int: + return self._ts_init + + cls.ts_init = ts_init + + if not hasattr(cls, "to_dict"): + + def to_dict(self): + result = {attr: getattr(self, attr) for attr in self.__annotations__} + + if hasattr(self, "instrument_id"): + result["instrument_id"] = self.instrument_id.value + result["ts_event"] = self._ts_event + result["ts_init"] = self._ts_init + + return result + + cls.to_dict = to_dict + + if not hasattr(cls, "from_dict"): + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> cls: + if "instrument_id" in data: + data["instrument_id"] = InstrumentId.from_str(data["instrument_id"]) + + return cls(**data) + + cls.from_dict = from_dict + + if not hasattr(cls, "to_bytes"): + + def to_bytes(self) -> bytes: + return msgspec.msgpack.encode(self.to_dict()) + + cls.to_bytes = to_bytes + + if not hasattr(cls, "from_bytes"): + + @classmethod + def from_bytes(cls, data: bytes) -> cls: + return cls.from_dict(msgspec.msgpack.decode(data)) + + cls.from_bytes = from_bytes + + if not hasattr(cls, "to_arrow"): + + def to_arrow(self) -> pa.RecordBatch: + return pa.RecordBatch.from_pylist([self.to_dict()], schema=cls._schema) + + cls.to_arrow = to_arrow + + if not 
hasattr(cls, "from_arrow"): + + @classmethod + def from_arrow(cls, table: pa.Table) -> cls: + return [cls.from_dict(d) for d in table.to_pylist()] + + cls.from_arrow = from_arrow + + if not hasattr(cls, "_schema"): + type_mapping = { + "InstrumentId": pa.string(), + "bool": pa.bool_(), + "float": pa.float64(), + "int": pa.int64(), + } + + cls._schema = pa.schema( + { + attr: type_mapping[cls.__annotations__[attr].__name__] + for attr in cls.__annotations__ + }, + ) + + register_serializable_type(cls, cls.to_dict, cls.from_dict) + register_arrow(cls, cls._schema, cls.to_arrow, cls.from_arrow) + + return cls diff --git a/tests/unit_tests/model/test_custom_data.py b/tests/unit_tests/model/test_custom_data.py new file mode 100644 index 000000000000..2945b78ea821 --- /dev/null +++ b/tests/unit_tests/model/test_custom_data.py @@ -0,0 +1,53 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +from nautilus_trader.core.data import Data +from nautilus_trader.model.custom import customdataclass +from nautilus_trader.model.identifiers import InstrumentId + + +@customdataclass +class GreeksTestData(Data): + instrument_id: InstrumentId = InstrumentId.from_str("ES.GLBX") + delta: float = 0.0 + + def __repr__(self): + return f"{type(self).__name__}(instrument_id={self.instrument_id}, delta={self.delta:.2f}, ts_event={self.ts_event}, ts_init={self.ts_init})" + + +def test_customdata_decorator_properties() -> None: + # Arrange, Act + data = GreeksTestData(ts_event=2, ts_init=1) + + # Assert + assert data.ts_event == 2 + assert data.ts_init == 1 + + +def test_customdata_decorator_dict() -> None: + # Arrange + data = GreeksTestData(ts_event=2, ts_init=1) + + # Act + data_dict = data.to_dict() + + # Assert + assert data_dict == { + "instrument_id": "ES.GLBX", + "delta": 0.0, + "ts_event": 2, + "ts_init": 1, + } + # assert GreeksTestData.from_dict(data_dict) == data From 2fdd6480803e92173800708afd2bd84e07c52093 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 16:31:57 +1000 Subject: [PATCH 16/60] Separate Rust and Python network modules --- nautilus_core/Cargo.lock | 1 + nautilus_core/network/Cargo.toml | 1 + nautilus_core/network/src/http.rs | 184 ++----- nautilus_core/network/src/python/http.rs | 129 +++++ nautilus_core/network/src/python/mod.rs | 22 +- nautilus_core/network/src/python/socket.rs | 319 ++++++++++++ nautilus_core/network/src/python/websocket.rs | 457 ++++++++++++++++++ nautilus_core/network/src/socket.rs | 306 +----------- nautilus_core/network/src/websocket.rs | 440 +---------------- 9 files changed, 980 insertions(+), 879 deletions(-) create mode 100644 nautilus_core/network/src/python/http.rs create mode 100644 nautilus_core/network/src/python/socket.rs create mode 100644 nautilus_core/network/src/python/websocket.rs diff 
--git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 9013fb5f31fd..5332abeac217 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -2806,6 +2806,7 @@ version = "0.28.0" dependencies = [ "anyhow", "axum", + "bytes", "criterion", "dashmap 6.0.1", "futures", diff --git a/nautilus_core/network/Cargo.toml b/nautilus_core/network/Cargo.toml index e605975cabdb..8713adcbc429 100644 --- a/nautilus_core/network/Cargo.toml +++ b/nautilus_core/network/Cargo.toml @@ -13,6 +13,7 @@ crate-type = ["rlib", "staticlib", "cdylib"] [dependencies] nautilus-core = { path = "../core" } anyhow = { workspace = true } +bytes = { workspace = true } futures = { workspace = true } pyo3 = { workspace = true, optional = true } pyo3-asyncio-0-21 = { workspace = true, optional = true } diff --git a/nautilus_core/network/src/http.rs b/nautilus_core/network/src/http.rs index dc0c168ad696..4cdef592facb 100644 --- a/nautilus_core/network/src/http.rs +++ b/nautilus_core/network/src/http.rs @@ -21,6 +21,7 @@ use std::{ sync::Arc, }; +use bytes::Bytes; use futures_util::{stream, StreamExt}; use pyo3::{exceptions::PyException, prelude::*, types::PyBytes}; use reqwest::{ @@ -30,6 +31,32 @@ use reqwest::{ use crate::ratelimiter::{clock::MonotonicClock, quota::Quota, RateLimiter}; +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[cfg_attr( + feature = "python", + pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") +)] +pub enum HttpMethod { + GET, + POST, + PUT, + DELETE, + PATCH, +} + +#[allow(clippy::from_over_into)] +impl Into for HttpMethod { + fn into(self) -> Method { + match self { + Self::GET => Method::GET, + Self::POST => Method::POST, + Self::PUT => Method::PUT, + Self::DELETE => Method::DELETE, + Self::PATCH => Method::PATCH, + } + } +} + /// A high-performance `HttpClient` for HTTP requests. 
/// /// The client is backed by a hyper Client which keeps connections alive and @@ -40,8 +67,8 @@ use crate::ratelimiter::{clock::MonotonicClock, quota::Quota, RateLimiter}; /// for the give `header_keys`. #[derive(Clone)] pub struct InnerHttpClient { - client: reqwest::Client, - header_keys: Vec, + pub(crate) client: reqwest::Client, + pub(crate) header_keys: Vec, } impl InnerHttpClient { @@ -87,65 +114,16 @@ impl InnerHttpClient { .map(|(k, v)| (k.clone(), v.to_owned())) .collect(); let status = response.status().as_u16(); - let bytes = response.bytes().await?; + let body = response.bytes().await?; Ok(HttpResponse { status, headers, - body: bytes.to_vec(), + body, }) } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -#[cfg_attr( - feature = "python", - pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") -)] -pub enum HttpMethod { - GET, - POST, - PUT, - DELETE, - PATCH, -} - -#[allow(clippy::from_over_into)] -impl Into for HttpMethod { - fn into(self) -> Method { - match self { - Self::GET => Method::GET, - Self::POST => Method::POST, - Self::PUT => Method::PUT, - Self::DELETE => Method::DELETE, - Self::PATCH => Method::PATCH, - } - } -} - -#[pymethods] -impl HttpMethod { - fn __hash__(&self) -> isize { - let mut h = DefaultHasher::new(); - self.hash(&mut h); - h.finish() as isize - } -} - -/// HttpResponse contains relevant data from a HTTP request. -#[derive(Debug, Clone)] -#[cfg_attr( - feature = "python", - pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") -)] -pub struct HttpResponse { - #[pyo3(get)] - pub status: u16, - #[pyo3(get)] - headers: HashMap, - body: Vec, -} - impl Default for InnerHttpClient { /// Creates a new default [`InnerHttpClient`] instance. 
fn default() -> Self { @@ -157,21 +135,16 @@ impl Default for InnerHttpClient { } } -#[pymethods] -impl HttpResponse { - #[new] - fn py_new(status: u16, body: Vec) -> Self { - Self { - status, - body, - headers: Default::default(), - } - } - - #[getter] - fn get_body(&self, py: Python) -> PyResult> { - Ok(PyBytes::new(py, &self.body).into()) - } +/// HttpResponse contains relevant data from a HTTP request. +#[derive(Clone, Debug)] +#[cfg_attr( + feature = "python", + pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") +)] +pub struct HttpResponse { + pub status: u16, + pub(crate) headers: HashMap, + pub(crate) body: Bytes, } #[cfg_attr( @@ -179,79 +152,8 @@ impl HttpResponse { pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") )] pub struct HttpClient { - rate_limiter: Arc>, - client: InnerHttpClient, -} - -#[pymethods] -impl HttpClient { - /// Create a new HttpClient. - /// - /// * `header_keys`: The key value pairs for the given `header_keys` are retained from the responses. - /// * `keyed_quota`: A list of string quota pairs that gives quota for specific key values. - /// * `default_quota`: The default rate limiting quota for any request. - /// Default quota is optional and no quota is passthrough. - #[new] - #[pyo3(signature = (header_keys = Vec::new(), keyed_quotas = Vec::new(), default_quota = None))] - #[must_use] - pub fn py_new( - header_keys: Vec, - keyed_quotas: Vec<(String, Quota)>, - default_quota: Option, - ) -> Self { - let client = reqwest::Client::new(); - let rate_limiter = Arc::new(RateLimiter::new_with_quota(default_quota, keyed_quotas)); - - let client = InnerHttpClient { - client, - header_keys, - }; - - Self { - rate_limiter, - client, - } - } - - /// Send an HTTP request. - /// - /// * `method`: The HTTP method to call. - /// * `url`: The request is sent to this url. - /// * `headers`: The header key value pairs in the request. - /// * `body`: The bytes sent in the body of request. 
- /// * `keys`: The keys used for rate limiting the request. - #[pyo3(name = "request")] - fn py_request<'py>( - &self, - method: HttpMethod, - url: String, - headers: Option>, - body: Option<&'py PyBytes>, - keys: Option>, - py: Python<'py>, - ) -> PyResult> { - let headers = headers.unwrap_or_default(); - let body_vec = body.map(|py_bytes| py_bytes.as_bytes().to_vec()); - let keys = keys.unwrap_or_default(); - let client = self.client.clone(); - let rate_limiter = self.rate_limiter.clone(); - let method = method.into(); - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - // Check keys for rate limiting quota - let tasks = keys.iter().map(|key| rate_limiter.until_key_ready(key)); - stream::iter(tasks) - .for_each(|key| async move { - key.await; - }) - .await; - match client.send_request(method, url, headers, body_vec).await { - Ok(res) => Ok(res), - Err(e) => Err(PyErr::new::(format!( - "Error handling response: {e}" - ))), - } - }) - } + pub(crate) rate_limiter: Arc>, + pub(crate) client: InnerHttpClient, } //////////////////////////////////////////////////////////////////////////////// diff --git a/nautilus_core/network/src/python/http.rs b/nautilus_core/network/src/python/http.rs new file mode 100644 index 000000000000..ac97574bd55c --- /dev/null +++ b/nautilus_core/network/src/python/http.rs @@ -0,0 +1,129 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ------------------------------------------------------------------------------------------------- + +use std::{ + collections::{hash_map::DefaultHasher, HashMap}, + hash::{Hash, Hasher}, + sync::Arc, +}; + +use futures_util::{stream, StreamExt}; +use pyo3::{exceptions::PyException, prelude::*, types::PyBytes}; + +use crate::{ + http::{HttpClient, HttpMethod, HttpResponse, InnerHttpClient}, + ratelimiter::{quota::Quota, RateLimiter}, +}; + +#[pymethods] +impl HttpMethod { + fn __hash__(&self) -> isize { + let mut h = DefaultHasher::new(); + self.hash(&mut h); + h.finish() as isize + } +} + +#[pymethods] +impl HttpResponse { + #[getter] + #[pyo3(name = "status")] + pub fn py_status(&self) -> u16 { + self.status + } + + #[getter] + #[pyo3(name = "headers")] + pub fn py_headers(&self) -> HashMap { + self.headers.clone() + } + + #[getter] + #[pyo3(name = "body")] + pub fn py_body(&self) -> &[u8] { + self.body.as_ref() + } +} + +#[pymethods] +impl HttpClient { + /// Create a new HttpClient. + /// + /// * `header_keys`: The key value pairs for the given `header_keys` are retained from the responses. + /// * `keyed_quota`: A list of string quota pairs that gives quota for specific key values. + /// * `default_quota`: The default rate limiting quota for any request. + /// Default quota is optional and no quota is passthrough. 
+ #[new] + #[pyo3(signature = (header_keys = Vec::new(), keyed_quotas = Vec::new(), default_quota = None))] + #[must_use] + pub fn py_new( + header_keys: Vec, + keyed_quotas: Vec<(String, Quota)>, + default_quota: Option, + ) -> Self { + let client = reqwest::Client::new(); + let rate_limiter = Arc::new(RateLimiter::new_with_quota(default_quota, keyed_quotas)); + + let client = InnerHttpClient { + client, + header_keys, + }; + + Self { + rate_limiter, + client, + } + } + + /// Send an HTTP request. + /// + /// * `method`: The HTTP method to call. + /// * `url`: The request is sent to this url. + /// * `headers`: The header key value pairs in the request. + /// * `body`: The bytes sent in the body of request. + /// * `keys`: The keys used for rate limiting the request. + #[pyo3(name = "request")] + fn py_request<'py>( + &self, + method: HttpMethod, + url: String, + headers: Option>, + body: Option<&'py PyBytes>, + keys: Option>, + py: Python<'py>, + ) -> PyResult> { + let headers = headers.unwrap_or_default(); + let body_vec = body.map(|py_bytes| py_bytes.as_bytes().to_vec()); + let keys = keys.unwrap_or_default(); + let client = self.client.clone(); + let rate_limiter = self.rate_limiter.clone(); + let method = method.into(); + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + // Check keys for rate limiting quota + let tasks = keys.iter().map(|key| rate_limiter.until_key_ready(key)); + stream::iter(tasks) + .for_each(|key| async move { + key.await; + }) + .await; + match client.send_request(method, url, headers, body_vec).await { + Ok(res) => Ok(res), + Err(e) => Err(PyErr::new::(format!( + "Error handling response: {e}" + ))), + } + }) + } +} diff --git a/nautilus_core/network/src/python/mod.rs b/nautilus_core/network/src/python/mod.rs index a0fa1ffcb031..09b5085a1f51 100644 --- a/nautilus_core/network/src/python/mod.rs +++ b/nautilus_core/network/src/python/mod.rs @@ -15,20 +15,22 @@ //! Python bindings from `pyo3`. 
-use pyo3::prelude::*; +pub mod http; +pub mod socket; +pub mod websocket; -use crate::{http, ratelimiter, socket, websocket}; +use pyo3::prelude::*; /// Loaded as nautilus_pyo3.network #[pymodule] pub fn network(_: Python<'_>, m: &PyModule) -> PyResult<()> { - m.add_class::()?; - m.add_class::()?; - m.add_class::()?; - m.add_class::()?; - m.add_class::()?; - m.add_class::()?; - m.add_class::()?; - m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; Ok(()) } diff --git a/nautilus_core/network/src/python/socket.rs b/nautilus_core/network/src/python/socket.rs new file mode 100644 index 000000000000..db5fea01395c --- /dev/null +++ b/nautilus_core/network/src/python/socket.rs @@ -0,0 +1,319 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ------------------------------------------------------------------------------------------------- + +use std::{ + collections::{hash_map::DefaultHasher, HashMap}, + hash::{Hash, Hasher}, + sync::{atomic::Ordering, Arc}, +}; + +use futures_util::{stream, StreamExt}; +use nautilus_core::python::to_pyruntime_err; +use pyo3::{exceptions::PyException, prelude::*, types::PyBytes}; +use tokio::io::AsyncWriteExt; +use tokio_tungstenite::tungstenite::stream::Mode; + +use crate::{ + http::{HttpClient, HttpMethod, HttpResponse, InnerHttpClient}, + ratelimiter::{quota::Quota, RateLimiter}, + socket::{SocketClient, SocketConfig}, +}; + +#[pymethods] +impl SocketConfig { + #[new] + fn py_new( + url: String, + ssl: bool, + suffix: Vec, + handler: PyObject, + heartbeat: Option<(u64, Vec)>, + ) -> Self { + let mode = if ssl { Mode::Tls } else { Mode::Plain }; + Self { + url, + mode, + suffix, + handler, + heartbeat, + } + } +} + +#[pymethods] +impl SocketClient { + /// Create a socket client. + /// + /// # Safety + /// + /// - Throws an Exception if it is unable to make socket connection + #[staticmethod] + #[pyo3(name = "connect")] + fn py_connect( + config: SocketConfig, + post_connection: Option, + post_reconnection: Option, + post_disconnection: Option, + py: Python<'_>, + ) -> PyResult> { + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + Self::connect( + config, + post_connection, + post_reconnection, + post_disconnection, + ) + .await + .map_err(to_pyruntime_err) + }) + } + + /// Closes the client heart beat and reader task. + /// + /// The connection is not completely closed until all references + /// to the client are gone and the client is dropped. 
+ /// + /// # Safety + /// + /// - The client should not be used after closing it + /// - Any auto-reconnect job should be aborted before closing the client + #[pyo3(name = "disconnect")] + fn py_disconnect<'py>(slf: PyRef<'_, Self>, py: Python<'py>) -> PyResult> { + let disconnect_mode = slf.disconnect_mode.clone(); + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + disconnect_mode.store(true, Ordering::SeqCst); + Ok(()) + }) + } + + /// Check if the client is still alive. + /// + /// Even if the connection is disconnected the client will still be alive + /// and try to reconnect. Only when reconnect fails the client will + /// terminate. + /// + /// This is particularly useful for check why a `send` failed. It could + /// be because the connection disconnected and the client is still alive + /// and reconnecting. In such cases the send can be retried after some + /// delay + #[getter] + fn is_alive(slf: PyRef<'_, Self>) -> bool { + !slf.controller_task.is_finished() + } + + /// Send bytes data to the connection. + /// + /// # Safety + /// + /// - Throws an Exception if it is not able to send data. 
+ #[pyo3(name = "send")] + fn py_send<'py>( + slf: PyRef<'_, Self>, + mut data: Vec, + py: Python<'py>, + ) -> PyResult> { + let writer = slf.writer.clone(); + data.extend(&slf.suffix); + + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + let mut writer = writer.lock().await; + writer.write_all(&data).await?; + Ok(()) + }) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Tests +//////////////////////////////////////////////////////////////////////////////// +#[cfg(test)] +mod tests { + use pyo3::{prelude::*, prepare_freethreaded_python}; + use tokio::{ + io::{AsyncReadExt, AsyncWriteExt}, + net::TcpListener, + task::{self, JoinHandle}, + time::{sleep, Duration}, + }; + use tokio_tungstenite::tungstenite::stream::Mode; + use tracing_test::traced_test; + + use crate::socket::{SocketClient, SocketConfig}; + + struct TestServer { + task: JoinHandle<()>, + port: u16, + } + + impl Drop for TestServer { + fn drop(&mut self) { + self.task.abort(); + } + } + + impl TestServer { + async fn basic_client_test() -> Self { + let server = TcpListener::bind("127.0.0.1:0").await.unwrap(); + let port = TcpListener::local_addr(&server).unwrap().port(); + + // Setup test server + let handle = task::spawn(async move { + // Keep listening for new connections + loop { + let (mut stream, _) = server.accept().await.unwrap(); + tracing::debug!("socket:test Server accepted connection"); + + // Keep receiving messages from connection and sending them back as it is + // if the message contains a close stop receiving messages + // and drop the connection. 
+ task::spawn(async move { + let mut buf = Vec::new(); + loop { + let bytes = stream.read_buf(&mut buf).await.unwrap(); + tracing::debug!("socket:test Server received {bytes} bytes"); + + // Terminate if 0 bytes have been read + // Connection has been terminated or vector buffer is completely + if bytes == 0 { + break; + } else { + // if received data has a line break + // extract and write it to the stream + while let Some((i, _)) = + &buf.windows(2).enumerate().find(|(_, pair)| pair == b"\r\n") + { + let close_message = b"close".as_slice(); + if &buf[0..*i] == close_message { + tracing::debug!("socket:test Client sent closing message"); + return; + } else { + tracing::debug!("socket:test Server sending message"); + stream + .write_all(buf.drain(0..i + 2).as_slice()) + .await + .unwrap(); + } + } + } + } + }); + } + }); + + Self { task: handle, port } + } + } + + #[tokio::test] + #[traced_test] + async fn basic_client_test() { + prepare_freethreaded_python(); + + const N: usize = 10; + + // Initialize test server + let server = TestServer::basic_client_test().await; + + // Create counter class and handler that increments it + let (counter, handler) = Python::with_gil(|py| { + let pymod = PyModule::from_code( + py, + r" +class Counter: + def __init__(self): + self.count = 0 + + def handler(self, bytes): + if bytes.decode().rstrip() == 'ping': + self.count = self.count + 1 + + def get_count(self): + return self.count + +counter = Counter()", + "", + "", + ) + .unwrap(); + + let counter = pymod.getattr("counter").unwrap().into_py(py); + let handler = counter.getattr(py, "handler").unwrap().into_py(py); + + (counter, handler) + }); + + let config = SocketConfig { + url: format!("127.0.0.1:{}", server.port), + handler: handler.clone(), + mode: Mode::Plain, + suffix: b"\r\n".to_vec(), + heartbeat: None, + }; + let client: SocketClient = SocketClient::connect(config, None, None, None) + .await + .unwrap(); + + // Send messages that increment the count + for _ in 0..N { + 
let _ = client.send_bytes(b"ping".as_slice()).await; + } + + sleep(Duration::from_secs(1)).await; + let count_value: usize = Python::with_gil(|py| { + counter + .getattr(py, "get_count") + .unwrap() + .call0(py) + .unwrap() + .extract(py) + .unwrap() + }); + + // Check count is same as number messages sent + assert_eq!(count_value, N); + + ////////////////////////////////////////////////////////////////////// + // Close connection client should reconnect and send messages + ////////////////////////////////////////////////////////////////////// + + // close the connection and wait + // client should reconnect automatically + let _ = client.send_bytes(b"close".as_slice()).await; + sleep(Duration::from_secs(2)).await; + + for _ in 0..N { + let _ = client.send_bytes(b"ping".as_slice()).await; + } + + // Check count is same as number messages sent + sleep(Duration::from_secs(1)).await; + let count_value: usize = Python::with_gil(|py| { + counter + .getattr(py, "get_count") + .unwrap() + .call0(py) + .unwrap() + .extract(py) + .unwrap() + }); + + // Check that messages were received correctly after reconnecting + assert_eq!(count_value, N + N); + + // Shutdown client + client.disconnect().await; + assert!(client.is_disconnected()); + } +} diff --git a/nautilus_core/network/src/python/websocket.rs b/nautilus_core/network/src/python/websocket.rs new file mode 100644 index 000000000000..47fe0baa765f --- /dev/null +++ b/nautilus_core/network/src/python/websocket.rs @@ -0,0 +1,457 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (C) 2015-2024 Nautech Systems Pty Ltd. All rights reserved. +// https://nautechsystems.io +// +// Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +// You may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ------------------------------------------------------------------------------------------------- + +use std::{ + collections::{hash_map::DefaultHasher, HashMap}, + hash::{Hash, Hasher}, + sync::{atomic::Ordering, Arc}, +}; + +use futures::SinkExt; +use futures_util::{stream, StreamExt}; +use nautilus_core::python::{to_pyruntime_err, to_pyvalue_err}; +use pyo3::{exceptions::PyException, prelude::*, types::PyBytes}; +use tokio_tungstenite::tungstenite::Message; + +use crate::{ + http::{HttpClient, HttpMethod, HttpResponse, InnerHttpClient}, + ratelimiter::{quota::Quota, RateLimiter}, + websocket::{WebSocketClient, WebSocketConfig}, +}; + +#[pymethods] +impl WebSocketConfig { + #[new] + fn py_new( + url: String, + handler: PyObject, + headers: Vec<(String, String)>, + heartbeat: Option, + heartbeat_msg: Option, + ping_handler: Option, + ) -> Self { + Self { + url, + handler, + headers, + heartbeat, + heartbeat_msg, + ping_handler, + } + } +} + +#[pymethods] +impl WebSocketClient { + /// Create a websocket client. 
+ /// + /// # Safety + /// + /// - Throws an Exception if it is unable to make websocket connection + #[staticmethod] + #[pyo3(name = "connect")] + fn py_connect( + config: WebSocketConfig, + post_connection: Option, + post_reconnection: Option, + post_disconnection: Option, + py: Python<'_>, + ) -> PyResult> { + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + Self::connect( + config, + post_connection, + post_reconnection, + post_disconnection, + ) + .await + .map_err(to_pyruntime_err) + }) + } + + /// Closes the client heart beat and reader task. + /// + /// The connection is not completely closed the till all references + /// to the client are gone and the client is dropped. + /// + /// # Safety + /// + /// - The client should not be used after closing it + /// - Any auto-reconnect job should be aborted before closing the client + #[pyo3(name = "disconnect")] + fn py_disconnect<'py>(slf: PyRef<'_, Self>, py: Python<'py>) -> PyResult> { + let disconnect_mode = slf.disconnect_mode.clone(); + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + disconnect_mode.store(true, Ordering::SeqCst); + Ok(()) + }) + } + + /// Send bytes data to the server. + /// + /// # Safety + /// + /// - Raises PyRuntimeError if not able to send data. + #[pyo3(name = "send")] + fn py_send<'py>( + slf: PyRef<'_, Self>, + data: Vec, + py: Python<'py>, + ) -> PyResult> { + tracing::debug!("Sending bytes {:?}", data); + let writer = slf.writer.clone(); + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + let mut guard = writer.lock().await; + guard + .send(Message::Binary(data)) + .await + .map_err(to_pyruntime_err) + }) + } + + /// Send text data to the server. + /// + /// # Safety + /// + /// - Raises PyRuntimeError if not able to send data. 
+ #[pyo3(name = "send_text")] + fn py_send_text<'py>( + slf: PyRef<'_, Self>, + data: String, + py: Python<'py>, + ) -> PyResult> { + tracing::debug!("Sending text: {}", data); + let writer = slf.writer.clone(); + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + let mut guard = writer.lock().await; + guard + .send(Message::Text(data)) + .await + .map_err(to_pyruntime_err) + }) + } + + /// Send pong bytes data to the server. + /// + /// # Safety + /// + /// - Raises PyRuntimeError if not able to send data. + #[pyo3(name = "send_pong")] + fn py_send_pong<'py>( + slf: PyRef<'_, Self>, + data: Vec, + py: Python<'py>, + ) -> PyResult> { + let data_str = String::from_utf8(data.clone()).map_err(to_pyvalue_err)?; + tracing::debug!("Sending pong: {}", data_str); + let writer = slf.writer.clone(); + pyo3_asyncio_0_21::tokio::future_into_py(py, async move { + let mut guard = writer.lock().await; + guard + .send(Message::Pong(data)) + .await + .map_err(to_pyruntime_err) + }) + } + + /// Check if the client is still alive. + /// + /// Even if the connection is disconnected the client will still be alive + /// and trying to reconnect. Only when reconnect fails the client will + /// terminate. + /// + /// This is particularly useful for checking why a `send` failed. It could + /// be because the connection disconnected and the client is still alive + /// and reconnecting. In such cases the send can be retried after some + /// delay. 
+ #[getter] + fn is_alive(slf: PyRef<'_, Self>) -> bool { + !slf.controller_task.is_finished() + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Tests +//////////////////////////////////////////////////////////////////////////////// +#[cfg(test)] +mod tests { + use futures_util::{SinkExt, StreamExt}; + use pyo3::{prelude::*, prepare_freethreaded_python}; + use tokio::{ + net::TcpListener, + task::{self, JoinHandle}, + time::{sleep, Duration}, + }; + use tokio_tungstenite::{ + accept_hdr_async, + tungstenite::{ + handshake::server::{self, Callback}, + http::HeaderValue, + }, + }; + use tracing_test::traced_test; + + use crate::websocket::{WebSocketClient, WebSocketConfig}; + + struct TestServer { + task: JoinHandle<()>, + port: u16, + } + + #[derive(Debug, Clone)] + struct TestCallback { + key: String, + value: HeaderValue, + } + + impl Callback for TestCallback { + fn on_request( + self, + request: &server::Request, + response: server::Response, + ) -> Result { + let _ = response; + let value = request.headers().get(&self.key); + assert!(value.is_some()); + + if let Some(value) = request.headers().get(&self.key) { + assert_eq!(value, self.value); + } + + Ok(response) + } + } + + impl TestServer { + async fn setup(key: String, value: String) -> Self { + let server = TcpListener::bind("127.0.0.1:0").await.unwrap(); + let port = TcpListener::local_addr(&server).unwrap().port(); + + let test_call_back = TestCallback { + key, + value: HeaderValue::from_str(&value).unwrap(), + }; + + // Setup test server + let task = task::spawn(async move { + // keep accepting connections + loop { + let (conn, _) = server.accept().await.unwrap(); + let mut websocket = accept_hdr_async(conn, test_call_back.clone()) + .await + .unwrap(); + + task::spawn(async move { + loop { + let msg = websocket.next().await.unwrap().unwrap(); + // We do not want to send back ping/pong messages. 
+ if msg.is_binary() || msg.is_text() { + websocket.send(msg).await.unwrap(); + } else if msg.is_close() { + if let Err(e) = websocket.close(None).await { + tracing::debug!("Connection already closed {e}"); + }; + break; + } + } + }); + } + }); + + Self { task, port } + } + } + + impl Drop for TestServer { + fn drop(&mut self) { + self.task.abort(); + } + } + + #[tokio::test] + #[traced_test] + async fn basic_client_test() { + prepare_freethreaded_python(); + + const N: usize = 10; + let mut success_count = 0; + let header_key = "hello-custom-key".to_string(); + let header_value = "hello-custom-value".to_string(); + + // Initialize test server + let server = TestServer::setup(header_key.clone(), header_value.clone()).await; + + // Create counter class and handler that increments it + let (counter, handler) = Python::with_gil(|py| { + let pymod = PyModule::from_code( + py, + r" +class Counter: + def __init__(self): + self.count = 0 + + def handler(self, bytes): + if bytes.decode() == 'ping': + self.count = self.count + 1 + + def get_count(self): + return self.count + +counter = Counter()", + "", + "", + ) + .unwrap(); + + let counter = pymod.getattr("counter").unwrap().into_py(py); + let handler = counter.getattr(py, "handler").unwrap().into_py(py); + + (counter, handler) + }); + + let config = WebSocketConfig::py_new( + format!("ws://127.0.0.1:{}", server.port), + handler.clone(), + vec![(header_key, header_value)], + None, + None, + None, + ); + let client = WebSocketClient::connect(config, None, None, None) + .await + .unwrap(); + + // Send messages that increment the count + for _ in 0..N { + if client.send_bytes(b"ping".to_vec()).await.is_ok() { + success_count += 1; + }; + } + + // Check count is same as number messages sent + sleep(Duration::from_secs(1)).await; + let count_value: usize = Python::with_gil(|py| { + counter + .getattr(py, "get_count") + .unwrap() + .call0(py) + .unwrap() + .extract(py) + .unwrap() + }); + assert_eq!(count_value, success_count); 
+ + ////////////////////////////////////////////////////////////////////// + // Close connection client should reconnect and send messages + ////////////////////////////////////////////////////////////////////// + + // close the connection + // client should reconnect automatically + client.send_close_message().await; + + // Send messages that increment the count + sleep(Duration::from_secs(2)).await; + for _ in 0..N { + if client.send_bytes(b"ping".to_vec()).await.is_ok() { + success_count += 1; + }; + } + + // Check count is same as number messages sent + sleep(Duration::from_secs(1)).await; + let count_value: usize = Python::with_gil(|py| { + counter + .getattr(py, "get_count") + .unwrap() + .call0(py) + .unwrap() + .extract(py) + .unwrap() + }); + assert_eq!(count_value, success_count); + assert_eq!(success_count, N + N); + + // Shutdown client + client.disconnect().await; + assert!(client.is_disconnected()); + } + + #[tokio::test] + #[traced_test] + async fn message_ping_test() { + prepare_freethreaded_python(); + + let header_key = "hello-custom-key".to_string(); + let header_value = "hello-custom-value".to_string(); + + let (checker, handler) = Python::with_gil(|py| { + let pymod = PyModule::from_code( + py, + r" +class Checker: + def __init__(self): + self.check = False + + def handler(self, bytes): + if bytes.decode() == 'heartbeat message': + self.check = True + + def get_check(self): + return self.check + +checker = Checker()", + "", + "", + ) + .unwrap(); + + let checker = pymod.getattr("checker").unwrap().into_py(py); + let handler = checker.getattr(py, "handler").unwrap().into_py(py); + + (checker, handler) + }); + + // Initialize test server and config + let server = TestServer::setup(header_key.clone(), header_value.clone()).await; + let config = WebSocketConfig::py_new( + format!("ws://127.0.0.1:{}", server.port), + handler.clone(), + vec![(header_key, header_value)], + Some(1), + Some("heartbeat message".to_string()), + None, + ); + let client = 
WebSocketClient::connect(config, None, None, None) + .await + .unwrap(); + + // Check if ping message has the correct message + sleep(Duration::from_secs(2)).await; + let check_value: bool = Python::with_gil(|py| { + checker + .getattr(py, "get_check") + .unwrap() + .call0(py) + .unwrap() + .extract(py) + .unwrap() + }); + assert!(check_value); + + // Shutdown client + client.disconnect().await; + assert!(client.is_disconnected()); + } +} diff --git a/nautilus_core/network/src/socket.rs b/nautilus_core/network/src/socket.rs index e19384f534b2..c5bb3181d613 100644 --- a/nautilus_core/network/src/socket.rs +++ b/nautilus_core/network/src/socket.rs @@ -50,36 +50,15 @@ type TcpReader = ReadHalf>; )] pub struct SocketConfig { /// The URL to connect to. - url: String, + pub url: String, /// The connection mode {Plain, TLS}. - mode: Mode, + pub mode: Mode, /// The sequence of bytes which separates lines. - suffix: Vec, + pub suffix: Vec, /// The Python function to handle incoming messages. - handler: PyObject, + pub handler: PyObject, /// The optional heartbeat with period and beat message. - heartbeat: Option<(u64, Vec)>, -} - -#[pymethods] -impl SocketConfig { - #[new] - fn py_new( - url: String, - ssl: bool, - suffix: Vec, - handler: PyObject, - heartbeat: Option<(u64, Vec)>, - ) -> Self { - let mode = if ssl { Mode::Tls } else { Mode::Plain }; - Self { - url, - mode, - suffix, - handler, - heartbeat, - } - } + pub heartbeat: Option<(u64, Vec)>, } /// Creates a TcpStream with the server. 
@@ -304,10 +283,10 @@ impl Drop for SocketClientInner { pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") )] pub struct SocketClient { - writer: SharedTcpWriter, - controller_task: task::JoinHandle<()>, - disconnect_mode: Arc, - suffix: Vec, + pub(crate) writer: SharedTcpWriter, + pub(crate) controller_task: task::JoinHandle<()>, + pub(crate) disconnect_mode: Arc, + pub(crate) suffix: Vec, } impl SocketClient { @@ -436,270 +415,3 @@ impl SocketClient { }) } } - -#[pymethods] -impl SocketClient { - /// Create a socket client. - /// - /// # Safety - /// - /// - Throws an Exception if it is unable to make socket connection - #[staticmethod] - #[pyo3(name = "connect")] - fn py_connect( - config: SocketConfig, - post_connection: Option, - post_reconnection: Option, - post_disconnection: Option, - py: Python<'_>, - ) -> PyResult> { - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - Self::connect( - config, - post_connection, - post_reconnection, - post_disconnection, - ) - .await - .map_err(to_pyruntime_err) - }) - } - - /// Closes the client heart beat and reader task. - /// - /// The connection is not completely closed until all references - /// to the client are gone and the client is dropped. - /// - /// # Safety - /// - /// - The client should not be used after closing it - /// - Any auto-reconnect job should be aborted before closing the client - #[pyo3(name = "disconnect")] - fn py_disconnect<'py>(slf: PyRef<'_, Self>, py: Python<'py>) -> PyResult> { - let disconnect_mode = slf.disconnect_mode.clone(); - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - disconnect_mode.store(true, Ordering::SeqCst); - Ok(()) - }) - } - - /// Check if the client is still alive. - /// - /// Even if the connection is disconnected the client will still be alive - /// and try to reconnect. Only when reconnect fails the client will - /// terminate. - /// - /// This is particularly useful for check why a `send` failed. 
It could - /// be because the connection disconnected and the client is still alive - /// and reconnecting. In such cases the send can be retried after some - /// delay - #[getter] - fn is_alive(slf: PyRef<'_, Self>) -> bool { - !slf.controller_task.is_finished() - } - - /// Send bytes data to the connection. - /// - /// # Safety - /// - /// - Throws an Exception if it is not able to send data. - #[pyo3(name = "send")] - fn py_send<'py>( - slf: PyRef<'_, Self>, - mut data: Vec, - py: Python<'py>, - ) -> PyResult> { - let writer = slf.writer.clone(); - data.extend(&slf.suffix); - - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - let mut writer = writer.lock().await; - writer.write_all(&data).await?; - Ok(()) - }) - } -} - -//////////////////////////////////////////////////////////////////////////////// -// Tests -//////////////////////////////////////////////////////////////////////////////// -#[cfg(test)] -mod tests { - use pyo3::{prelude::*, prepare_freethreaded_python}; - use tokio::{ - io::{AsyncReadExt, AsyncWriteExt}, - net::TcpListener, - task::{self, JoinHandle}, - time::{sleep, Duration}, - }; - use tokio_tungstenite::tungstenite::stream::Mode; - use tracing_test::traced_test; - - use crate::socket::{SocketClient, SocketConfig}; - - struct TestServer { - task: JoinHandle<()>, - port: u16, - } - - impl Drop for TestServer { - fn drop(&mut self) { - self.task.abort(); - } - } - - impl TestServer { - async fn basic_client_test() -> Self { - let server = TcpListener::bind("127.0.0.1:0").await.unwrap(); - let port = TcpListener::local_addr(&server).unwrap().port(); - - // Setup test server - let handle = task::spawn(async move { - // keep listening for new connections - loop { - let (mut stream, _) = server.accept().await.unwrap(); - tracing::debug!("socket:test Server accepted connection"); - - // keep receiving messages from connection - // and sending them back as it is - // if the message contains a close stop receiving messages - // and drop the 
connection - task::spawn(async move { - let mut buf = Vec::new(); - loop { - let bytes = stream.read_buf(&mut buf).await.unwrap(); - tracing::debug!("socket:test Server received {bytes} bytes"); - - // Terminate if 0 bytes have been read - // Connection has been terminated or vector buffer is completely - if bytes == 0 { - break; - } else { - // if received data has a line break - // extract and write it to the stream - while let Some((i, _)) = - &buf.windows(2).enumerate().find(|(_, pair)| pair == b"\r\n") - { - let close_message = b"close".as_slice(); - if &buf[0..*i] == close_message { - tracing::debug!("socket:test Client sent closing message"); - return; - } else { - tracing::debug!("socket:test Server sending message"); - stream - .write_all(buf.drain(0..i + 2).as_slice()) - .await - .unwrap(); - } - } - } - } - }); - } - }); - - Self { task: handle, port } - } - } - - #[tokio::test] - #[traced_test] - async fn basic_client_test() { - prepare_freethreaded_python(); - - const N: usize = 10; - - // Initialize test server - let server = TestServer::basic_client_test().await; - - // Create counter class and handler that increments it - let (counter, handler) = Python::with_gil(|py| { - let pymod = PyModule::from_code( - py, - r" -class Counter: - def __init__(self): - self.count = 0 - - def handler(self, bytes): - if bytes.decode().rstrip() == 'ping': - self.count = self.count + 1 - - def get_count(self): - return self.count - -counter = Counter()", - "", - "", - ) - .unwrap(); - - let counter = pymod.getattr("counter").unwrap().into_py(py); - let handler = counter.getattr(py, "handler").unwrap().into_py(py); - - (counter, handler) - }); - - let config = SocketConfig { - url: format!("127.0.0.1:{}", server.port), - handler: handler.clone(), - mode: Mode::Plain, - suffix: b"\r\n".to_vec(), - heartbeat: None, - }; - let client: SocketClient = SocketClient::connect(config, None, None, None) - .await - .unwrap(); - - // Send messages that increment the count - for _ 
in 0..N { - let _ = client.send_bytes(b"ping".as_slice()).await; - } - - sleep(Duration::from_secs(1)).await; - let count_value: usize = Python::with_gil(|py| { - counter - .getattr(py, "get_count") - .unwrap() - .call0(py) - .unwrap() - .extract(py) - .unwrap() - }); - - // Check count is same as number messages sent - assert_eq!(count_value, N); - - ////////////////////////////////////////////////////////////////////// - // Close connection client should reconnect and send messages - ////////////////////////////////////////////////////////////////////// - - // close the connection and wait - // client should reconnect automatically - let _ = client.send_bytes(b"close".as_slice()).await; - sleep(Duration::from_secs(2)).await; - - for _ in 0..N { - let _ = client.send_bytes(b"ping".as_slice()).await; - } - - // Check count is same as number messages sent - sleep(Duration::from_secs(1)).await; - let count_value: usize = Python::with_gil(|py| { - counter - .getattr(py, "get_count") - .unwrap() - .call0(py) - .unwrap() - .extract(py) - .unwrap() - }); - - // check that messages were received correctly after reconnecting - assert_eq!(count_value, N + N); - - // Shutdown client - client.disconnect().await; - assert!(client.is_disconnected()); - } -} diff --git a/nautilus_core/network/src/websocket.rs b/nautilus_core/network/src/websocket.rs index 32eca98e0a6f..5d72b2a8057a 100644 --- a/nautilus_core/network/src/websocket.rs +++ b/nautilus_core/network/src/websocket.rs @@ -47,34 +47,12 @@ type MessageReader = SplitStream>>; pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") )] pub struct WebSocketConfig { - url: String, - handler: PyObject, - headers: Vec<(String, String)>, - heartbeat: Option, - heartbeat_msg: Option, - ping_handler: Option, -} - -#[pymethods] -impl WebSocketConfig { - #[new] - fn py_new( - url: String, - handler: PyObject, - headers: Vec<(String, String)>, - heartbeat: Option, - heartbeat_msg: Option, - ping_handler: Option, - ) -> 
Self { - Self { - url, - handler, - headers, - heartbeat, - heartbeat_msg, - ping_handler, - } - } + pub url: String, + pub handler: PyObject, + pub headers: Vec<(String, String)>, + pub heartbeat: Option, + pub heartbeat_msg: Option, + pub ping_handler: Option, } /// `WebSocketClient` connects to a websocket server to read and send messages. @@ -325,9 +303,9 @@ impl Drop for WebSocketClientInner { pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.network") )] pub struct WebSocketClient { - writer: SharedMessageWriter, - controller_task: task::JoinHandle<()>, - disconnect_mode: Arc, + pub(crate) writer: SharedMessageWriter, + pub(crate) controller_task: task::JoinHandle<()>, + pub(crate) disconnect_mode: Arc, } impl WebSocketClient { @@ -464,403 +442,3 @@ impl WebSocketClient { }) } } - -#[pymethods] -impl WebSocketClient { - /// Create a websocket client. - /// - /// # Safety - /// - /// - Throws an Exception if it is unable to make websocket connection - #[staticmethod] - #[pyo3(name = "connect")] - fn py_connect( - config: WebSocketConfig, - post_connection: Option, - post_reconnection: Option, - post_disconnection: Option, - py: Python<'_>, - ) -> PyResult> { - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - Self::connect( - config, - post_connection, - post_reconnection, - post_disconnection, - ) - .await - .map_err(to_pyruntime_err) - }) - } - - /// Closes the client heart beat and reader task. - /// - /// The connection is not completely closed the till all references - /// to the client are gone and the client is dropped. 
- /// - /// # Safety - /// - /// - The client should not be used after closing it - /// - Any auto-reconnect job should be aborted before closing the client - #[pyo3(name = "disconnect")] - fn py_disconnect<'py>(slf: PyRef<'_, Self>, py: Python<'py>) -> PyResult> { - let disconnect_mode = slf.disconnect_mode.clone(); - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - disconnect_mode.store(true, Ordering::SeqCst); - Ok(()) - }) - } - - /// Send bytes data to the server. - /// - /// # Safety - /// - /// - Raises PyRuntimeError if not able to send data. - #[pyo3(name = "send")] - fn py_send<'py>( - slf: PyRef<'_, Self>, - data: Vec, - py: Python<'py>, - ) -> PyResult> { - tracing::debug!("Sending bytes {:?}", data); - let writer = slf.writer.clone(); - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - let mut guard = writer.lock().await; - guard - .send(Message::Binary(data)) - .await - .map_err(to_pyruntime_err) - }) - } - - /// Send text data to the server. - /// - /// # Safety - /// - /// - Raises PyRuntimeError if not able to send data. - #[pyo3(name = "send_text")] - fn py_send_text<'py>( - slf: PyRef<'_, Self>, - data: String, - py: Python<'py>, - ) -> PyResult> { - tracing::debug!("Sending text: {}", data); - let writer = slf.writer.clone(); - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - let mut guard = writer.lock().await; - guard - .send(Message::Text(data)) - .await - .map_err(to_pyruntime_err) - }) - } - - /// Send pong bytes data to the server. - /// - /// # Safety - /// - /// - Raises PyRuntimeError if not able to send data. 
- #[pyo3(name = "send_pong")] - fn py_send_pong<'py>( - slf: PyRef<'_, Self>, - data: Vec, - py: Python<'py>, - ) -> PyResult> { - let data_str = String::from_utf8(data.clone()).map_err(to_pyvalue_err)?; - tracing::debug!("Sending pong: {}", data_str); - let writer = slf.writer.clone(); - pyo3_asyncio_0_21::tokio::future_into_py(py, async move { - let mut guard = writer.lock().await; - guard - .send(Message::Pong(data)) - .await - .map_err(to_pyruntime_err) - }) - } - - /// Check if the client is still alive. - /// - /// Even if the connection is disconnected the client will still be alive - /// and trying to reconnect. Only when reconnect fails the client will - /// terminate. - /// - /// This is particularly useful for checking why a `send` failed. It could - /// be because the connection disconnected and the client is still alive - /// and reconnecting. In such cases the send can be retried after some - /// delay. - #[getter] - fn is_alive(slf: PyRef<'_, Self>) -> bool { - !slf.controller_task.is_finished() - } -} - -#[cfg(test)] -mod tests { - use futures_util::{SinkExt, StreamExt}; - use pyo3::{prelude::*, prepare_freethreaded_python}; - use tokio::{ - net::TcpListener, - task::{self, JoinHandle}, - time::{sleep, Duration}, - }; - use tokio_tungstenite::{ - accept_hdr_async, - tungstenite::{ - handshake::server::{self, Callback}, - http::HeaderValue, - }, - }; - use tracing_test::traced_test; - - use crate::websocket::{WebSocketClient, WebSocketConfig}; - - struct TestServer { - task: JoinHandle<()>, - port: u16, - } - - #[derive(Debug, Clone)] - struct TestCallback { - key: String, - value: HeaderValue, - } - - impl Callback for TestCallback { - fn on_request( - self, - request: &server::Request, - response: server::Response, - ) -> Result { - let _ = response; - let value = request.headers().get(&self.key); - assert!(value.is_some()); - - if let Some(value) = request.headers().get(&self.key) { - assert_eq!(value, self.value); - } - - Ok(response) - } - } - - 
impl TestServer { - async fn setup(key: String, value: String) -> Self { - let server = TcpListener::bind("127.0.0.1:0").await.unwrap(); - let port = TcpListener::local_addr(&server).unwrap().port(); - - let test_call_back = TestCallback { - key, - value: HeaderValue::from_str(&value).unwrap(), - }; - - // Setup test server - let task = task::spawn(async move { - // keep accepting connections - loop { - let (conn, _) = server.accept().await.unwrap(); - let mut websocket = accept_hdr_async(conn, test_call_back.clone()) - .await - .unwrap(); - - task::spawn(async move { - loop { - let msg = websocket.next().await.unwrap().unwrap(); - // We do not want to send back ping/pong messages. - if msg.is_binary() || msg.is_text() { - websocket.send(msg).await.unwrap(); - } else if msg.is_close() { - if let Err(e) = websocket.close(None).await { - tracing::debug!("Connection already closed {e}"); - }; - break; - } - } - }); - } - }); - - Self { task, port } - } - } - - impl Drop for TestServer { - fn drop(&mut self) { - self.task.abort(); - } - } - - #[tokio::test] - #[traced_test] - async fn basic_client_test() { - prepare_freethreaded_python(); - - const N: usize = 10; - let mut success_count = 0; - let header_key = "hello-custom-key".to_string(); - let header_value = "hello-custom-value".to_string(); - - // Initialize test server - let server = TestServer::setup(header_key.clone(), header_value.clone()).await; - - // Create counter class and handler that increments it - let (counter, handler) = Python::with_gil(|py| { - let pymod = PyModule::from_code( - py, - r" -class Counter: - def __init__(self): - self.count = 0 - - def handler(self, bytes): - if bytes.decode() == 'ping': - self.count = self.count + 1 - - def get_count(self): - return self.count - -counter = Counter()", - "", - "", - ) - .unwrap(); - - let counter = pymod.getattr("counter").unwrap().into_py(py); - let handler = counter.getattr(py, "handler").unwrap().into_py(py); - - (counter, handler) - }); - - let 
config = WebSocketConfig::py_new( - format!("ws://127.0.0.1:{}", server.port), - handler.clone(), - vec![(header_key, header_value)], - None, - None, - None, - ); - let client = WebSocketClient::connect(config, None, None, None) - .await - .unwrap(); - - // Send messages that increment the count - for _ in 0..N { - if client.send_bytes(b"ping".to_vec()).await.is_ok() { - success_count += 1; - }; - } - - // Check count is same as number messages sent - sleep(Duration::from_secs(1)).await; - let count_value: usize = Python::with_gil(|py| { - counter - .getattr(py, "get_count") - .unwrap() - .call0(py) - .unwrap() - .extract(py) - .unwrap() - }); - assert_eq!(count_value, success_count); - - ////////////////////////////////////////////////////////////////////// - // Close connection client should reconnect and send messages - ////////////////////////////////////////////////////////////////////// - - // close the connection - // client should reconnect automatically - client.send_close_message().await; - - // Send messages that increment the count - sleep(Duration::from_secs(2)).await; - for _ in 0..N { - if client.send_bytes(b"ping".to_vec()).await.is_ok() { - success_count += 1; - }; - } - - // Check count is same as number messages sent - sleep(Duration::from_secs(1)).await; - let count_value: usize = Python::with_gil(|py| { - counter - .getattr(py, "get_count") - .unwrap() - .call0(py) - .unwrap() - .extract(py) - .unwrap() - }); - assert_eq!(count_value, success_count); - assert_eq!(success_count, N + N); - - // Shutdown client - client.disconnect().await; - assert!(client.is_disconnected()); - } - - #[tokio::test] - #[traced_test] - async fn message_ping_test() { - prepare_freethreaded_python(); - - let header_key = "hello-custom-key".to_string(); - let header_value = "hello-custom-value".to_string(); - - let (checker, handler) = Python::with_gil(|py| { - let pymod = PyModule::from_code( - py, - r" -class Checker: - def __init__(self): - self.check = False - - 
def handler(self, bytes): - if bytes.decode() == 'heartbeat message': - self.check = True - - def get_check(self): - return self.check - -checker = Checker()", - "", - "", - ) - .unwrap(); - - let checker = pymod.getattr("checker").unwrap().into_py(py); - let handler = checker.getattr(py, "handler").unwrap().into_py(py); - - (checker, handler) - }); - - // Initialize test server and config - let server = TestServer::setup(header_key.clone(), header_value.clone()).await; - let config = WebSocketConfig::py_new( - format!("ws://127.0.0.1:{}", server.port), - handler.clone(), - vec![(header_key, header_value)], - Some(1), - Some("heartbeat message".to_string()), - None, - ); - let client = WebSocketClient::connect(config, None, None, None) - .await - .unwrap(); - - // Check if ping message has the correct message - sleep(Duration::from_secs(2)).await; - let check_value: bool = Python::with_gil(|py| { - checker - .getattr(py, "get_check") - .unwrap() - .call0(py) - .unwrap() - .extract(py) - .unwrap() - }); - assert!(check_value); - - // Shutdown client - client.disconnect().await; - assert!(client.is_disconnected()); - } -} From 9a5e76fd96bfca5f3f438566edf509a49c71df66 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 16:42:46 +1000 Subject: [PATCH 17/60] Fix HttpResponse pyo3 constructor --- nautilus_core/network/src/python/http.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/nautilus_core/network/src/python/http.rs b/nautilus_core/network/src/python/http.rs index ac97574bd55c..ee8cab7a3c85 100644 --- a/nautilus_core/network/src/python/http.rs +++ b/nautilus_core/network/src/python/http.rs @@ -19,6 +19,7 @@ use std::{ sync::Arc, }; +use bytes::Bytes; use futures_util::{stream, StreamExt}; use pyo3::{exceptions::PyException, prelude::*, types::PyBytes}; @@ -38,6 +39,15 @@ impl HttpMethod { #[pymethods] impl HttpResponse { + #[new] + pub fn py_new(status: u16, body: Vec) -> Self { + Self { + status, + headers: HashMap::new(), + body: 
Bytes::from(body), + } + } + #[getter] #[pyo3(name = "status")] pub fn py_status(&self) -> u16 { From 198dd9029a109994fd9ce5dcc4b1c09479af7281 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Sun, 4 Aug 2024 18:05:24 +1000 Subject: [PATCH 18/60] Fix config docstrings --- nautilus_trader/adapters/binance/config.py | 5 ++--- nautilus_trader/adapters/bybit/config.py | 4 ++-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/nautilus_trader/adapters/binance/config.py b/nautilus_trader/adapters/binance/config.py index 6387dd497f26..4f4e1da8c313 100644 --- a/nautilus_trader/adapters/binance/config.py +++ b/nautilus_trader/adapters/binance/config.py @@ -13,7 +13,6 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- - from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE from nautilus_trader.adapters.binance.common.enums import BinanceAccountType from nautilus_trader.config import LiveDataClientConfig @@ -35,8 +34,8 @@ class BinanceDataClientConfig(LiveDataClientConfig, frozen=True): `BINANCE_TESTNET_API_KEY` environment variables. api_secret : str, optional The Binance API public key. - If ``None`` then will source the `BINANCE_API_KEY` or - `BINANCE_TESTNET_API_KEY` environment variables. + If ``None`` then will source the `BINANCE_API_SECRET` or + `BINANCE_TESTNET_API_SECRET` environment variables. account_type : BinanceAccountType, default BinanceAccountType.SPOT The account type for the client. base_url_http : str, optional diff --git a/nautilus_trader/adapters/bybit/config.py b/nautilus_trader/adapters/bybit/config.py index c499d984bc0c..5af8c0883833 100644 --- a/nautilus_trader/adapters/bybit/config.py +++ b/nautilus_trader/adapters/bybit/config.py @@ -30,8 +30,8 @@ class BybitDataClientConfig(LiveDataClientConfig, frozen=True): `BYBIT_TESTNET_API_KEY` environment variables. api_secret : str, optional The Bybit API public key. 
- If ``None`` then will source the `BYBIT_API_KEY` or - `BYBIT_TESTNET_API_KEY` environment variables. + If ``None`` then will source the `BYBIT_API_SECRET` or + `BYBIT_TESTNET_API_SECRET` environment variables. product_types : list[BybitProductType], optional The Bybit product type for the client. If not specified then will use all products. From 0ada7e34ff381c5d6a8d161976d5dd301c561f5a Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Sun, 4 Aug 2024 13:18:49 +0200 Subject: [PATCH 19/60] Implement precision checks in OrderMatchingEngine in Rust (#1826) --- nautilus_core/backtest/src/matching_engine.rs | 279 ++++++++++++++++-- nautilus_core/model/src/orders/any.rs | 30 ++ 2 files changed, 277 insertions(+), 32 deletions(-) diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index d5a9b4e8a234..da495252576b 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -218,6 +218,7 @@ impl OrderMatchingEngine { } // -- TRADING COMMANDS ---------------------------------------------------- + #[allow(clippy::needless_return)] pub fn process_order(&mut self, order: &OrderAny, account_id: AccountId) { if self.core.order_exists(order.client_order_id()) { self.generate_order_rejected(order, "Order already exists".into()); @@ -240,6 +241,7 @@ impl OrderMatchingEngine { ) .into(), ); + return; } } if let Some(expiration_ns) = self.instrument.expiration_ns() { @@ -253,9 +255,62 @@ impl OrderMatchingEngine { ) .into(), ); + return; } } } + + // Check fo valid order quantity precision + if order.quantity().precision != self.instrument.size_precision() { + self.generate_order_rejected( + order, + format!( + "Invalid order quantity precision for order {}, was {} when {} size precision is {}", + order.client_order_id(), + order.quantity().precision, + self.instrument.id(), + self.instrument.size_precision() + ) + .into(), + ); + return; + } + + // Check for valid order price 
precision + if let Some(price) = order.price() { + if price.precision != self.instrument.price_precision() { + self.generate_order_rejected( + order, + format!( + "Invalid order price precision for order {}, was {} when {} price precision is {}", + order.client_order_id(), + price.precision, + self.instrument.id(), + self.instrument.price_precision() + ) + .into(), + ); + } + return; + } + + // Check for valid order trigger price precision + if let Some(trigger_price) = order.trigger_price() { + if trigger_price.precision != self.instrument.price_precision() { + self.generate_order_rejected( + order, + format!( + "Invalid order trigger price precision for order {}, was {} when {} price precision is {}", + order.client_order_id(), + trigger_price.precision, + self.instrument.id(), + self.instrument.price_precision() + ) + .into(), + ); + return; + } + } } // -- ORDER PROCESSING ---------------------------------------------------- @@ -619,6 +674,7 @@ mod tests { use nautilus_common::{ cache::Cache, msgbus::{ + handler::ShareableMessageHandler, stubs::{get_message_saving_handler, MessageSavingHandler}, MessageBus, }, @@ -630,9 +686,9 @@ mod tests { identifiers::AccountId, instruments::{any::InstrumentAny, stubs::futures_contract_es}, orders::stubs::TestOrderStubs, - types::quantity::Quantity, + types::{price::Price, quantity::Quantity}, }; - use rstest::rstest; + use rstest::{fixture, rstest}; use ustr::Ustr; use crate::matching_engine::{OrderMatchingEngine, OrderMatchingEngineConfig}; @@ -640,6 +696,47 @@ mod tests { static ATOMIC_TIME: LazyLock = LazyLock::new(|| AtomicTime::new(true, UnixNanos::default())); + // -- FIXTURES --------------------------------------------------------------------------- + #[fixture] + fn msgbus() -> MessageBus { + MessageBus::default() + } + + #[fixture] + fn account_id() -> AccountId { + AccountId::from("SIM-001") + } + + #[fixture] + fn time() -> AtomicTime { + AtomicTime::new(true, UnixNanos::default()) + } + + #[fixture] + fn 
order_event_handler() -> ShareableMessageHandler { + get_message_saving_handler::(Ustr::from("ExecEngine.process")) + } + + // for valid es futures contract currently active + #[fixture] + fn instrument_es() -> InstrumentAny { + let activation = UnixNanos::from( + Utc.with_ymd_and_hms(2022, 4, 8, 0, 0, 0) + .unwrap() + .timestamp_nanos_opt() + .unwrap() as u64, + ); + let expiration = UnixNanos::from( + Utc.with_ymd_and_hms(2100, 7, 8, 0, 0, 0) + .unwrap() + .timestamp_nanos_opt() + .unwrap() as u64, + ); + InstrumentAny::FuturesContract(futures_contract_es(Some(activation), Some(expiration))) + } + + // -- HELPERS --------------------------------------------------------------------------- + fn get_order_matching_engine( instrument: InstrumentAny, msgbus: Rc, @@ -659,18 +756,33 @@ mod tests { ) } + fn get_order_event_handler_messages( + event_handler: ShareableMessageHandler, + ) -> Vec { + event_handler + .0 + .as_ref() + .as_any() + .downcast_ref::>() + .unwrap() + .get_messages() + } + + // -- TESTS --------------------------------------------------------------------------- #[rstest] - fn test_order_matching_engine_instrument_already_expired() { - let account_id = AccountId::from("SIM-001"); - let time = AtomicTime::new(true, UnixNanos::default()); - let mut msgbus = MessageBus::default(); + fn test_order_matching_engine_instrument_already_expired( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + ) { let instrument = InstrumentAny::FuturesContract(futures_contract_es(None, None)); // Register saving message handler to exec engine endpoint - let exec_engine_endpoint = "ExecEngine.process"; - let msg_handler = - get_message_saving_handler::(Ustr::from(exec_engine_endpoint)); - msgbus.register(exec_engine_endpoint, msg_handler.clone()); + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); // Create engine and process order let mut engine = 
get_order_matching_engine(instrument.clone(), Rc::new(msgbus)); @@ -684,13 +796,7 @@ mod tests { engine.process_order(&order, account_id); // Get messages and test - let saved_messages = msg_handler - .0 - .as_ref() - .as_any() - .downcast_ref::>() - .unwrap() - .get_messages(); + let saved_messages = get_order_event_handler_messages(order_event_handler); assert_eq!(saved_messages.len(), 1); let first_message = saved_messages.first().unwrap(); assert_eq!(first_message.event_type(), OrderEventType::Rejected); @@ -701,10 +807,12 @@ mod tests { } #[rstest] - fn test_order_matching_engine_instrument_not_active() { - let account_id = AccountId::from("SIM-001"); - let time = AtomicTime::new(true, UnixNanos::default()); - let mut msgbus = MessageBus::default(); + fn test_order_matching_engine_instrument_not_active( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + ) { let activation = UnixNanos::from( Utc.with_ymd_and_hms(2222, 4, 8, 0, 0, 0) .unwrap() @@ -721,10 +829,10 @@ mod tests { InstrumentAny::FuturesContract(futures_contract_es(Some(activation), Some(expiration))); // Register saving message handler to exec engine endpoint - let exec_engine_endpoint = "ExecEngine.process"; - let msg_handler = - get_message_saving_handler::(Ustr::from(exec_engine_endpoint)); - msgbus.register(exec_engine_endpoint, msg_handler.clone()); + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); // Create engine and process order let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus)); @@ -738,13 +846,7 @@ mod tests { engine.process_order(&order, account_id); // Get messages and test - let saved_messages = msg_handler - .0 - .as_ref() - .as_any() - .downcast_ref::>() - .unwrap() - .get_messages(); + let saved_messages = get_order_event_handler_messages(order_event_handler); assert_eq!(saved_messages.len(), 1); let first_message = 
saved_messages.first().unwrap(); assert_eq!(first_message.event_type(), OrderEventType::Rejected); @@ -753,4 +855,117 @@ mod tests { Ustr::from("Contract ESZ1.GLBX is not yet active, activation 7960723200000000000") ); } + + #[rstest] + fn test_order_matching_engine_wrong_order_quantity_precision( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + instrument_es: InstrumentAny, + ) { + // Register saving message handler to exec engine endpoint + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); + + // Create engine and process order + let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus)); + let order = TestOrderStubs::market_order( + instrument_es.id(), + OrderSide::Buy, + Quantity::from("1.122"), // <- wrong precision for es futures contract (which is 1)x + None, + None, + ); + engine.process_order(&order, account_id); + + // Get messages and test + let saved_messages = get_order_event_handler_messages(order_event_handler); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from("Invalid order quantity precision for order O-19700101-000000-001-001-1, was 3 when ESZ1.GLBX size precision is 0") + ); + } + + #[rstest] + fn test_order_matching_engine_wrong_order_price_precision( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + instrument_es: InstrumentAny, + ) { + // Register saving message handler to exec engine endpoint + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); + + // Create engine and process order + let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus)); + let limit_order = 
TestOrderStubs::limit_order( + instrument_es.id(), + OrderSide::Sell, + Price::from("100.12333"), // <- wrong price precision for es futures contract (which is 2) + Quantity::from("1"), + None, + None, + ); + + engine.process_order(&limit_order, account_id); + + // Get messages and test + let saved_messages = get_order_event_handler_messages(order_event_handler); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from("Invalid order price precision for order O-19700101-000000-001-001-1, was 5 when ESZ1.GLBX price precision is 2") + ); + } + + #[rstest] + fn test_order_matching_engine_wrong_order_trigger_price_precision( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + instrument_es: InstrumentAny, + ) { + // Register saving message handler to exec engine endpoint + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); + + // Create engine and process order + let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus)); + let stop_order = TestOrderStubs::stop_market_order( + instrument_es.id(), + OrderSide::Sell, + Price::from("100.12333"), // <- wrong trigger price precision for es futures contract (which is 2) + Quantity::from("1"), + None, + None, + None, + ); + + engine.process_order(&stop_order, account_id); + + // Get messages and test + let saved_messages = get_order_event_handler_messages(order_event_handler); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from("Invalid order trigger price precision for order O-19700101-000000-001-001-1, was 5 when ESZ1.GLBX price precision is 2") 
+ ); + } } diff --git a/nautilus_core/model/src/orders/any.rs b/nautilus_core/model/src/orders/any.rs index ecc0d37414e1..ff532463f914 100644 --- a/nautilus_core/model/src/orders/any.rs +++ b/nautilus_core/model/src/orders/any.rs @@ -452,6 +452,36 @@ impl OrderAny { Self::TrailingStopMarket(order) => order.is_inflight(), } } + + #[must_use] + pub fn price(&self) -> Option { + match self { + Self::Limit(order) => Some(order.price), + Self::LimitIfTouched(order) => Some(order.price), + Self::Market(_) => None, + Self::MarketIfTouched(_) => None, + Self::MarketToLimit(order) => order.price, + Self::StopLimit(order) => Some(order.price), + Self::StopMarket(_) => None, + Self::TrailingStopLimit(order) => Some(order.price), + Self::TrailingStopMarket(_) => None, + } + } + + #[must_use] + pub fn trigger_price(&self) -> Option { + match self { + Self::Limit(_) => None, + Self::LimitIfTouched(order) => Some(order.trigger_price), + Self::Market(_) => None, + Self::MarketIfTouched(order) => Some(order.trigger_price), + Self::MarketToLimit(_) => None, + Self::StopLimit(order) => Some(order.trigger_price), + Self::StopMarket(order) => Some(order.trigger_price), + Self::TrailingStopLimit(order) => Some(order.trigger_price), + Self::TrailingStopMarket(order) => Some(order.trigger_price), + } + } } impl PartialEq for OrderAny { From b136940719c9cd2a8db02f1ee620405caa8e080e Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Sun, 4 Aug 2024 23:08:20 +0200 Subject: [PATCH 20/60] Make instrument_id foreign key in order_events table (#1827) --- .../tests/test_cache_database_postgres.rs | 15 ++++++++++++++- .../infrastructure/tests/test_cache_postgres.rs | 8 ++++++++ schema/tables.sql | 2 +- .../test_cache_database_postgres.py | 12 ++++++++++++ 4 files changed, 35 insertions(+), 2 deletions(-) diff --git a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs index a41b05087c73..144f963bf1bb 100644 --- 
a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs @@ -232,6 +232,15 @@ mod serial_tests { Some(client_order_id_2), None, ); + // add foreign key dependencies: instrument and currencies + pg_cache + .add_currency(&instrument.base_currency().unwrap()) + .unwrap(); + pg_cache.add_currency(&instrument.quote_currency()).unwrap(); + pg_cache + .add_instrument(&InstrumentAny::CurrencyPair(instrument)) + .unwrap(); + // add orders pg_cache.add_order(&market_order).unwrap(); pg_cache.add_order(&limit_order).unwrap(); wait_until( @@ -261,8 +270,12 @@ mod serial_tests { let instrument = InstrumentAny::CurrencyPair(currency_pair_ethusdt()); let account = account_id(); let mut pg_cache = get_pg_cache_database().await.unwrap(); - // Add the target currency of order + // add foreign key dependencies: instrument and currencies + pg_cache + .add_currency(&instrument.base_currency().unwrap()) + .unwrap(); pg_cache.add_currency(&instrument.quote_currency()).unwrap(); + pg_cache.add_instrument(&instrument).unwrap(); // 1. 
Create the order let mut market_order = TestOrderStubs::market_order( instrument.id(), diff --git a/nautilus_core/infrastructure/tests/test_cache_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_postgres.rs index 1ad7dd5fc9fc..4afe1029744c 100644 --- a/nautilus_core/infrastructure/tests/test_cache_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_postgres.rs @@ -84,6 +84,14 @@ mod serial_tests { Some(ClientOrderId::new("O-19700101-0000-001-001-1").unwrap()), None, ); + // add foreign key dependencies: instrument and currencies + database + .add_currency(&instrument.base_currency().unwrap()) + .unwrap(); + database.add_currency(&instrument.quote_currency()).unwrap(); + database + .add_instrument(&InstrumentAny::CurrencyPair(instrument)) + .unwrap(); // insert into database and wait database.add_order(&market_order).unwrap(); wait_until( diff --git a/schema/tables.sql b/schema/tables.sql index 559e259fcce5..4825177335df 100644 --- a/schema/tables.sql +++ b/schema/tables.sql @@ -103,7 +103,7 @@ CREATE TABLE IF NOT EXISTS "order_event" ( kind TEXT NOT NULL, trader_id TEXT REFERENCES trader(id) ON DELETE CASCADE, strategy_id TEXT NOT NULL, - instrument_id TEXT NOT NULL, + instrument_id TEXT REFERENCES instrument(id) ON DELETE CASCADE, order_id TEXT DEFAULT NULL, trade_id TEXT, currency TEXT REFERENCES currency(id), diff --git a/tests/integration_tests/infrastructure/test_cache_database_postgres.py b/tests/integration_tests/infrastructure/test_cache_database_postgres.py index f9137989655e..fccc684e6905 100644 --- a/tests/integration_tests/infrastructure/test_cache_database_postgres.py +++ b/tests/integration_tests/infrastructure/test_cache_database_postgres.py @@ -355,6 +355,10 @@ async def test_add_order(self): OrderSide.BUY, Quantity.from_int(100_000), ) + # Add foreign key dependencies: instrument and currencies + self.database.add_currency(_AUDUSD_SIM.base_currency) + self.database.add_currency(_AUDUSD_SIM.quote_currency) + 
self.database.add_instrument(_AUDUSD_SIM) # Act self.database.add_order(order) @@ -376,6 +380,10 @@ async def test_update_order_for_closed_order(self): OrderSide.BUY, Quantity.from_int(100_000), ) + # Add foreign key dependencies: instrument and currencies + self.database.add_currency(_AUDUSD_SIM.base_currency) + self.database.add_currency(_AUDUSD_SIM.quote_currency) + self.database.add_instrument(_AUDUSD_SIM) self.database.add_order(order) @@ -412,6 +420,10 @@ async def test_update_order_for_open_order(self): Quantity.from_int(100_000), Price.from_str("1.00000"), ) + # Add foreign key dependencies: instrument and currencies + self.database.add_currency(_AUDUSD_SIM.base_currency) + self.database.add_currency(_AUDUSD_SIM.quote_currency) + self.database.add_instrument(_AUDUSD_SIM) self.database.add_order(order) # Allow MPSC thread to insert From 080e8df711ed40ea7a55f36bcf9da748e03882b6 Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Mon, 5 Aug 2024 10:48:07 +0200 Subject: [PATCH 21/60] Implement load_index_order_client in Postgres cache database (#1830) --- nautilus_core/common/src/cache/database.rs | 2 +- nautilus_core/common/src/cache/mod.rs | 2 +- .../src/python/sql/cache_database.rs | 12 +- .../infrastructure/src/redis/cache.rs | 2 +- .../infrastructure/src/sql/cache_database.rs | 112 ++++++++++++------ .../infrastructure/src/sql/models/general.rs | 26 ++++ .../infrastructure/src/sql/queries.rs | 98 ++++++++++----- .../tests/test_cache_database_postgres.rs | 32 ++++- .../tests/test_cache_postgres.rs | 2 +- schema/tables.sql | 5 + 10 files changed, 219 insertions(+), 74 deletions(-) diff --git a/nautilus_core/common/src/cache/database.rs b/nautilus_core/common/src/cache/database.rs index 5c3d592bd9d7..aebdc84b871c 100644 --- a/nautilus_core/common/src/cache/database.rs +++ b/nautilus_core/common/src/cache/database.rs @@ -98,7 +98,7 @@ pub trait CacheDatabaseAdapter { fn add_account(&mut self, account: &AccountAny) -> anyhow::Result<()>; - fn add_order(&mut self, 
order: &OrderAny) -> anyhow::Result<()>; + fn add_order(&mut self, order: &OrderAny, client_id: Option) -> anyhow::Result<()>; fn add_position(&mut self, position: &Position) -> anyhow::Result<()>; diff --git a/nautilus_core/common/src/cache/mod.rs b/nautilus_core/common/src/cache/mod.rs index 0f5e762153b1..20fe478fbb4b 100644 --- a/nautilus_core/common/src/cache/mod.rs +++ b/nautilus_core/common/src/cache/mod.rs @@ -1390,7 +1390,7 @@ impl Cache { } if let Some(database) = &mut self.database { - database.add_order(&order)?; + database.add_order(&order, client_id)?; // TODO: Implement // if self.config.snapshot_orders { // database.snapshot_order_state(order)?; diff --git a/nautilus_core/infrastructure/src/python/sql/cache_database.rs b/nautilus_core/infrastructure/src/python/sql/cache_database.rs index 4a5fd3e613dc..086967b2bfdb 100644 --- a/nautilus_core/infrastructure/src/python/sql/cache_database.rs +++ b/nautilus_core/infrastructure/src/python/sql/cache_database.rs @@ -20,7 +20,7 @@ use nautilus_common::{cache::database::CacheDatabaseAdapter, runtime::get_runtim use nautilus_core::python::to_pyruntime_err; use nautilus_model::{ data::{bar::Bar, quote::QuoteTick, trade::TradeTick}, - identifiers::{AccountId, ClientOrderId, InstrumentId}, + identifiers::{AccountId, ClientId, ClientOrderId, InstrumentId}, python::{ account::{convert_account_any_to_pyobject, convert_pyobject_to_account_any}, instruments::{instrument_any_to_pyobject, pyobject_to_instrument_any}, @@ -127,9 +127,15 @@ impl PostgresCacheDatabase { } #[pyo3(name = "add_order")] - fn py_add_order(mut slf: PyRefMut<'_, Self>, order: PyObject, py: Python<'_>) -> PyResult<()> { + fn py_add_order( + mut slf: PyRefMut<'_, Self>, + order: PyObject, + client_id: Option, + py: Python<'_>, + ) -> PyResult<()> { let order_any = convert_pyobject_to_order_any(py, order)?; - slf.add_order(&order_any).map_err(to_pyruntime_err) + slf.add_order(&order_any, client_id) + .map_err(to_pyruntime_err) } #[pyo3(name = 
"update_order")] diff --git a/nautilus_core/infrastructure/src/redis/cache.rs b/nautilus_core/infrastructure/src/redis/cache.rs index 452709bb2231..a16e9a236164 100644 --- a/nautilus_core/infrastructure/src/redis/cache.rs +++ b/nautilus_core/infrastructure/src/redis/cache.rs @@ -847,7 +847,7 @@ impl CacheDatabaseAdapter for RedisCacheDatabaseAdapter { todo!() } - fn add_order(&mut self, order: &OrderAny) -> anyhow::Result<()> { + fn add_order(&mut self, order: &OrderAny, client_id: Option) -> anyhow::Result<()> { todo!() } diff --git a/nautilus_core/infrastructure/src/sql/cache_database.rs b/nautilus_core/infrastructure/src/sql/cache_database.rs index 6b366cbd67a4..ec3f83efc113 100644 --- a/nautilus_core/infrastructure/src/sql/cache_database.rs +++ b/nautilus_core/infrastructure/src/sql/cache_database.rs @@ -66,7 +66,7 @@ pub enum DatabaseQuery { Add(String, Vec), AddCurrency(Currency), AddInstrument(InstrumentAny), - AddOrder(OrderAny, bool), + AddOrder(OrderAny, Option, bool), AddAccount(AccountAny, bool), AddTrade(TradeTick), AddQuote(QuoteTick), @@ -128,47 +128,68 @@ async fn drain_buffer(pool: &PgPool, buffer: &mut VecDeque) { .unwrap() } }, - DatabaseQuery::AddOrder(order_any, updated) => match order_any { + DatabaseQuery::AddOrder(order_any, client_id, updated) => match order_any { OrderAny::Limit(order) => { - DatabaseQueries::add_order(pool, "LIMIT", updated, Box::new(order)) - .await - .unwrap() - } - OrderAny::LimitIfTouched(order) => { - DatabaseQueries::add_order(pool, "LIMIT_IF_TOUCHED", updated, Box::new(order)) + DatabaseQueries::add_order(pool, "LIMIT", updated, Box::new(order), client_id) .await .unwrap() } + OrderAny::LimitIfTouched(order) => DatabaseQueries::add_order( + pool, + "LIMIT_IF_TOUCHED", + updated, + Box::new(order), + client_id, + ) + .await + .unwrap(), OrderAny::Market(order) => { - DatabaseQueries::add_order(pool, "MARKET", updated, Box::new(order)) - .await - .unwrap() - } - OrderAny::MarketIfTouched(order) => { - 
DatabaseQueries::add_order(pool, "MARKET_IF_TOUCHED", updated, Box::new(order)) - .await - .unwrap() - } - OrderAny::MarketToLimit(order) => { - DatabaseQueries::add_order(pool, "MARKET_TO_LIMIT", updated, Box::new(order)) - .await - .unwrap() - } - OrderAny::StopLimit(order) => { - DatabaseQueries::add_order(pool, "STOP_LIMIT", updated, Box::new(order)) - .await - .unwrap() - } - OrderAny::StopMarket(order) => { - DatabaseQueries::add_order(pool, "STOP_MARKET", updated, Box::new(order)) + DatabaseQueries::add_order(pool, "MARKET", updated, Box::new(order), client_id) .await .unwrap() } + OrderAny::MarketIfTouched(order) => DatabaseQueries::add_order( + pool, + "MARKET_IF_TOUCHED", + updated, + Box::new(order), + client_id, + ) + .await + .unwrap(), + OrderAny::MarketToLimit(order) => DatabaseQueries::add_order( + pool, + "MARKET_TO_LIMIT", + updated, + Box::new(order), + client_id, + ) + .await + .unwrap(), + OrderAny::StopLimit(order) => DatabaseQueries::add_order( + pool, + "STOP_LIMIT", + updated, + Box::new(order), + client_id, + ) + .await + .unwrap(), + OrderAny::StopMarket(order) => DatabaseQueries::add_order( + pool, + "STOP_MARKET", + updated, + Box::new(order), + client_id, + ) + .await + .unwrap(), OrderAny::TrailingStopLimit(order) => DatabaseQueries::add_order( pool, "TRAILING_STOP_LIMIT", updated, Box::new(order), + client_id, ) .await .unwrap(), @@ -177,6 +198,7 @@ async fn drain_buffer(pool: &PgPool, buffer: &mut VecDeque) { "TRAILING_STOP_MARKET", updated, Box::new(order), + client_id, ) .await .unwrap(), @@ -434,7 +456,31 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { } fn load_index_order_client(&mut self) -> anyhow::Result> { - todo!() + let pool = self.pool.clone(); + let (tx, rx) = std::sync::mpsc::channel(); + tokio::spawn(async move { + let result = DatabaseQueries::load_distinct_order_event_client_ids(&pool).await; + match result { + Ok(currency) => { + if let Err(e) = tx.send(currency) { + log::error!("Failed to send 
load_index_order_client result : {:?}", e); + } + } + Err(e) => { + log::error!( + "Failed to run query load_distinct_order_event_client_ids: {:?}", + e + ); + if let Err(e) = tx.send(HashMap::new()) { + log::error!( + "Failed to send empty load_index_order_client result : {:?}", + e + ); + } + } + } + }); + Ok(rx.recv()?) } fn load_currency(&mut self, code: &Ustr) -> anyhow::Result> { @@ -599,8 +645,8 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { }) } - fn add_order(&mut self, order: &OrderAny) -> anyhow::Result<()> { - let query = DatabaseQuery::AddOrder(order.clone(), false); + fn add_order(&mut self, order: &OrderAny, client_id: Option) -> anyhow::Result<()> { + let query = DatabaseQuery::AddOrder(order.clone(), client_id, false); self.tx.send(query).map_err(|err| { anyhow::anyhow!("Failed to send query add_order to database message handler: {err}") }) @@ -772,7 +818,7 @@ impl CacheDatabaseAdapter for PostgresCacheDatabase { } fn update_order(&mut self, order: &OrderAny) -> anyhow::Result<()> { - let query = DatabaseQuery::AddOrder(order.clone(), true); + let query = DatabaseQuery::AddOrder(order.clone(), None, true); self.tx.send(query).map_err(|err| { anyhow::anyhow!("Failed to send query add_order to database message handler: {err}") }) diff --git a/nautilus_core/infrastructure/src/sql/models/general.rs b/nautilus_core/infrastructure/src/sql/models/general.rs index f74bfb90140d..7d2c94f08b1c 100644 --- a/nautilus_core/infrastructure/src/sql/models/general.rs +++ b/nautilus_core/infrastructure/src/sql/models/general.rs @@ -13,8 +13,34 @@ // limitations under the License. 
// ------------------------------------------------------------------------------------------------- +use nautilus_model::identifiers::{ClientId, ClientOrderId}; +use sqlx::{postgres::PgRow, Error, FromRow, Row}; + #[derive(Debug, sqlx::FromRow)] pub struct GeneralRow { pub id: String, pub value: Vec, } + +#[derive(Debug)] +pub struct OrderEventOrderClientIdCombination { + pub order_id: ClientOrderId, + pub client_id: ClientId, +} + +impl<'r> FromRow<'r, PgRow> for OrderEventOrderClientIdCombination { + fn from_row(row: &'r PgRow) -> Result { + let order_id = row + .try_get::<&str, _>("order_id") + .map(ClientOrderId::from) + .unwrap(); + let client_id = row + .try_get::<&str, _>("client_id") + .map(ClientId::from) + .unwrap(); + Ok(OrderEventOrderClientIdCombination { + order_id, + client_id, + }) + } +} diff --git a/nautilus_core/infrastructure/src/sql/queries.rs b/nautilus_core/infrastructure/src/sql/queries.rs index 0520409ce831..995c7b2224d8 100644 --- a/nautilus_core/infrastructure/src/sql/queries.rs +++ b/nautilus_core/infrastructure/src/sql/queries.rs @@ -22,7 +22,7 @@ use nautilus_model::{ account::state::AccountState, order::{OrderEvent, OrderEventAny}, }, - identifiers::{AccountId, ClientOrderId, InstrumentId}, + identifiers::{AccountId, ClientId, ClientOrderId, InstrumentId}, instruments::{any::InstrumentAny, Instrument}, orders::{any::OrderAny, base::Order}, types::{ @@ -39,7 +39,7 @@ use crate::sql::models::{ AggregationSourceModel, AggressorSideModel, AssetClassModel, BarAggregationModel, CurrencyTypeModel, PriceTypeModel, TrailingOffsetTypeModel, }, - general::GeneralRow, + general::{GeneralRow, OrderEventOrderClientIdCombination}, instruments::InstrumentAnyModel, orders::OrderEventAnyModel, types::CurrencyModel, @@ -198,6 +198,7 @@ impl DatabaseQueries { _kind: &str, updated: bool, order: Box, + client_id: Option, ) -> anyhow::Result<()> { if updated { let exists = @@ -213,55 +214,55 @@ impl DatabaseQueries { } match order.last_event().clone() { 
OrderEventAny::Accepted(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::CancelRejected(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Canceled(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Denied(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Emulated(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Expired(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Filled(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Initialized(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::ModifyRejected(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::PendingCancel(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::PendingUpdate(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Rejected(event) 
=> { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Released(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Submitted(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Updated(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::Triggered(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } OrderEventAny::PartiallyFilled(event) => { - DatabaseQueries::add_order_event(pool, Box::new(event)).await + DatabaseQueries::add_order_event(pool, Box::new(event), client_id).await } } } @@ -299,9 +300,12 @@ impl DatabaseQueries { pub async fn add_order_event( pool: &PgPool, order_event: Box, + client_id: Option, ) -> anyhow::Result<()> { let mut transaction = pool.begin().await?; + // Insert trader if it does not exist + // TODO remove this when node and trader initialization is implemented sqlx::query( r#" INSERT INTO "trader" (id) VALUES ($1) ON CONFLICT (id) DO NOTHING @@ -313,28 +317,42 @@ impl DatabaseQueries { .map(|_| ()) .map_err(|err| anyhow::anyhow!("Failed to insert into trader table: {err}"))?; + // Insert client if it does not exist + // TODO remove this when client initialization is implemented + if let Some(client_id) = client_id { + sqlx::query( + r#" + INSERT INTO "client" (id) VALUES ($1) ON CONFLICT (id) DO NOTHING + "#, + ) + .bind(client_id.to_string()) + .execute(&mut *transaction) + .await + .map(|_| ()) + .map_err(|err| anyhow::anyhow!("Failed to insert into client table: {err}"))?; + } + 
sqlx::query(r#" INSERT INTO "order_event" ( - id, kind, order_id, order_type, order_side, trader_id, strategy_id, instrument_id, trade_id, currency, quantity, time_in_force, liquidity_side, + id, kind, order_id, order_type, order_side, trader_id, client_id, strategy_id, instrument_id, trade_id, currency, quantity, time_in_force, liquidity_side, post_only, reduce_only, quote_quantity, reconciliation, price, last_px, last_qty, trigger_price, trigger_type, limit_offset, trailing_offset, trailing_offset_type, expire_time, display_qty, emulation_trigger, trigger_instrument_id, contingency_type, order_list_id, linked_order_ids, parent_order_id, exec_algorithm_id, exec_spawn_id, venue_order_id, account_id, position_id, commission, ts_event, ts_init, created_at, updated_at ) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, - $21, $22, $23, $24::trailing_offset_type, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38, $39, $40, $41, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP + $21, $22, $23, $24, $25::trailing_offset_type, $26, $27, $28, $29, $30, $31, $32, $33, $34, + $35, $36, $37, $38, $39, $40, $41, $42, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP ) ON CONFLICT (id) DO UPDATE SET - kind = $2, order_id = $3, order_type = $4, order_side=$5, trader_id = $6, strategy_id = $7, instrument_id = $8, trade_id = $9, currency = $10, - quantity = $11, time_in_force = $12, liquidity_side = $13, - post_only = $14, reduce_only = $15, quote_quantity = $16, reconciliation = $17, price = $18, last_px = $19, - last_qty = $20, trigger_price = $21, trigger_type = $22, limit_offset = $23, trailing_offset = $24, - trailing_offset_type = $25, expire_time = $26, display_qty = $27, emulation_trigger = $28, trigger_instrument_id = $29, - contingency_type = $30, order_list_id = $31, linked_order_ids = $32, - parent_order_id = $33, exec_algorithm_id = $34, exec_spawn_id = $35, venue_order_id = $36, account_id = $37, position_id = $38, 
commission = $39, - ts_event = $40, ts_init = $41, updated_at = CURRENT_TIMESTAMP + kind = $2, order_id = $3, order_type = $4, order_side=$5, trader_id = $6, client_id = $7, strategy_id = $8, instrument_id = $9, trade_id = $10, currency = $11, + quantity = $12, time_in_force = $13, liquidity_side = $14, post_only = $15, reduce_only = $16, quote_quantity = $17, reconciliation = $18, price = $19, last_px = $20, + last_qty = $21, trigger_price = $22, trigger_type = $23, limit_offset = $24, trailing_offset = $25, trailing_offset_type = $26, expire_time = $27, display_qty = $28, + emulation_trigger = $29, trigger_instrument_id = $30, contingency_type = $31, order_list_id = $32, linked_order_ids = $33, parent_order_id = $34, exec_algorithm_id = $35, + exec_spawn_id = $36, venue_order_id = $37, account_id = $38, position_id = $39, commission = $40, ts_event = $41, ts_init = $42, updated_at = CURRENT_TIMESTAMP + "#) .bind(order_event.id().to_string()) .bind(order_event.kind()) @@ -342,6 +360,7 @@ impl DatabaseQueries { .bind(order_event.order_type().map(|x| x.to_string())) .bind(order_event.order_side().map(|x| x.to_string())) .bind(order_event.trader_id().to_string()) + .bind(client_id.map(|x| x.to_string())) .bind(order_event.strategy_id().to_string()) .bind(order_event.instrument_id().to_string()) .bind(order_event.trade_id().map(|x| x.to_string())) @@ -692,4 +711,25 @@ impl DatabaseQueries { .map(|rows| rows.into_iter().map(|row| row.0).collect()) .map_err(|err| anyhow::anyhow!("Failed to load bars: {err}")) } + + pub async fn load_distinct_order_event_client_ids( + pool: &PgPool, + ) -> anyhow::Result> { + let mut map: HashMap = HashMap::new(); + let result = sqlx::query_as::<_, OrderEventOrderClientIdCombination>( + r#" + SELECT DISTINCT + order_id AS "order_id", + client_id AS "client_id" + FROM "order_event" + "#, + ) + .fetch_all(pool) + .await + .map_err(|err| anyhow::anyhow!("Failed to load account ids: {err}"))?; + for id in result { + map.insert(id.order_id, 
id.client_id); + } + Ok(map) + } } diff --git a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs index 144f963bf1bb..dd9331df91fb 100644 --- a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs @@ -31,7 +31,8 @@ mod serial_tests { enums::{CurrencyType, OrderSide, OrderStatus}, events::account::stubs::cash_account_state_million_usd, identifiers::{ - stubs::account_id, AccountId, ClientOrderId, InstrumentId, TradeId, VenueOrderId, + stubs::account_id, AccountId, ClientId, ClientOrderId, InstrumentId, TradeId, + VenueOrderId, }, instruments::{ any::InstrumentAny, @@ -212,7 +213,7 @@ mod serial_tests { } #[tokio::test(flavor = "multi_thread")] - async fn test_add_order() { + async fn test_postgres_cache_database_add_order_and_load_indexes() { let client_order_id_1 = ClientOrderId::new("O-19700101-000000-001-001-1").unwrap(); let client_order_id_2 = ClientOrderId::new("O-19700101-000000-001-001-2").unwrap(); let instrument = currency_pair_ethusdt(); @@ -240,9 +241,11 @@ mod serial_tests { pg_cache .add_instrument(&InstrumentAny::CurrencyPair(instrument)) .unwrap(); + // Set client id + let client_id = ClientId::new("TEST").unwrap(); // add orders - pg_cache.add_order(&market_order).unwrap(); - pg_cache.add_order(&limit_order).unwrap(); + pg_cache.add_order(&market_order, Some(client_id)).unwrap(); + pg_cache.add_order(&limit_order, Some(client_id)).unwrap(); wait_until( || { pg_cache @@ -260,8 +263,27 @@ mod serial_tests { .load_order(&market_order.client_order_id()) .unwrap(); let limit_order_result = pg_cache.load_order(&limit_order.client_order_id()).unwrap(); + let client_order_ids = pg_cache.load_index_order_client().unwrap(); entirely_equal(market_order_result.unwrap(), market_order); entirely_equal(limit_order_result.unwrap(), limit_order); + // Check event client order ids + 
assert_eq!(client_order_ids.len(), 2); + assert_eq!( + client_order_ids + .keys() + .cloned() + .collect::>(), + vec![client_order_id_1, client_order_id_2] + .into_iter() + .collect::>() + ); + assert_eq!( + client_order_ids + .values() + .cloned() + .collect::>(), + vec![client_id].into_iter().collect::>() + ); } #[tokio::test(flavor = "multi_thread")] @@ -284,7 +306,7 @@ mod serial_tests { Some(client_order_id_1), None, ); - pg_cache.add_order(&market_order).unwrap(); + pg_cache.add_order(&market_order, None).unwrap(); let submitted = TestOrderEventStubs::order_submitted(&market_order, account); market_order.apply(submitted).unwrap(); pg_cache.update_order(&market_order).unwrap(); diff --git a/nautilus_core/infrastructure/tests/test_cache_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_postgres.rs index 4afe1029744c..c5304dafea70 100644 --- a/nautilus_core/infrastructure/tests/test_cache_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_postgres.rs @@ -93,7 +93,7 @@ mod serial_tests { .add_instrument(&InstrumentAny::CurrencyPair(instrument)) .unwrap(); // insert into database and wait - database.add_order(&market_order).unwrap(); + database.add_order(&market_order, None).unwrap(); wait_until( || { let order = database diff --git a/schema/tables.sql b/schema/tables.sql index 4825177335df..cf29e7f79079 100644 --- a/schema/tables.sql +++ b/schema/tables.sql @@ -27,6 +27,10 @@ CREATE TABLE IF NOT EXISTS "account" ( id TEXT PRIMARY KEY NOT NULL ); +CREATE TABLE IF NOT EXISTS "client" ( + id TEXT PRIMARY KEY NOT NULL +); + CREATE TABLE IF NOT EXISTS "strategy" ( id TEXT PRIMARY KEY NOT NULL, order_id_tag TEXT, @@ -105,6 +109,7 @@ CREATE TABLE IF NOT EXISTS "order_event" ( strategy_id TEXT NOT NULL, instrument_id TEXT REFERENCES instrument(id) ON DELETE CASCADE, order_id TEXT DEFAULT NULL, + client_id TEXT REFERENCES client(id) ON DELETE CASCADE, trade_id TEXT, currency TEXT REFERENCES currency(id), order_type TEXT, From 
710626f5dfd1c2c9bad67f184b6dd969816caab8 Mon Sep 17 00:00:00 2001 From: faysou Date: Mon, 5 Aug 2024 10:16:05 +0100 Subject: [PATCH 22/60] Fix customdataclass and add tests (#1828) --- nautilus_trader/model/custom.py | 151 ++++++++++++--------- tests/unit_tests/model/test_custom_data.py | 64 +++++++-- 2 files changed, 143 insertions(+), 72 deletions(-) diff --git a/nautilus_trader/model/custom.py b/nautilus_trader/model/custom.py index 9afcdb499809..480becf2dc0c 100644 --- a/nautilus_trader/model/custom.py +++ b/nautilus_trader/model/custom.py @@ -13,6 +13,7 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- +from dataclasses import dataclass from typing import Any import msgspec @@ -23,101 +24,123 @@ from nautilus_trader.serialization.base import register_serializable_type -def customdataclass(cls): # noqa: C901 (too complex) - if cls.__init__ is object.__init__: +def customdataclass(*args, **kwargs): # noqa: C901 (too complex) + def wrapper(cls): # noqa: C901 (too complex) + if cls.__init__ is object.__init__: - def __init__(self, ts_event: int, ts_init: int, **kwargs): - for key, value in kwargs.items(): - setattr(self, key, value) + def __init__(self, ts_event: int = 0, ts_init: int = 0, **kwargs): + for key, value in kwargs.items(): + if key in self.__class__.__annotations__: + setattr(self, key, value) + else: + raise ValueError(f"Unexpected keyword argument: {key}") - self._ts_event = ts_event - self._ts_init = ts_init + self._ts_event = ts_event + self._ts_init = ts_init - cls.__init__ = __init__ + cls.__init__ = __init__ - @property - def ts_event(self) -> int: - return self._ts_event + cls = dataclass(cls, **kwargs) - cls.ts_event = ts_event + if "ts_event" not in cls.__dict__: - @property - def ts_init(self) -> int: - return self._ts_init + @property + def ts_event(self) -> int: + return self._ts_event - cls.ts_init = ts_init + cls.ts_event = ts_event - if not 
hasattr(cls, "to_dict"): + if "ts_init" not in cls.__dict__: - def to_dict(self): - result = {attr: getattr(self, attr) for attr in self.__annotations__} + @property + def ts_init(self) -> int: + return self._ts_init + + cls.ts_init = ts_init + + if "to_dict" not in cls.__dict__: + + def to_dict(self) -> dict[str, Any]: + result = {attr: getattr(self, attr) for attr in self.__annotations__} + + if hasattr(self, "instrument_id"): + result["instrument_id"] = self.instrument_id.value - if hasattr(self, "instrument_id"): - result["instrument_id"] = self.instrument_id.value result["ts_event"] = self._ts_event result["ts_init"] = self._ts_init - return result + return result + + cls.to_dict = to_dict + + if "from_dict" not in cls.__dict__: - cls.to_dict = to_dict + @classmethod + def from_dict(cls, data: dict[str, Any]) -> cls: + if "instrument_id" in data: + data["instrument_id"] = InstrumentId.from_str(data["instrument_id"]) - if not hasattr(cls, "from_dict"): + return cls(**data) - @classmethod - def from_dict(cls, data: dict[str, Any]) -> cls: - if "instrument_id" in data: - data["instrument_id"] = InstrumentId.from_str(data["instrument_id"]) + cls.from_dict = from_dict - return cls(**data) + if "to_bytes" not in cls.__dict__: - cls.from_dict = from_dict + def to_bytes(self) -> bytes: + return msgspec.msgpack.encode(self.to_dict()) - if not hasattr(cls, "to_bytes"): + cls.to_bytes = to_bytes - def to_bytes(self) -> bytes: - return msgspec.msgpack.encode(self.to_dict()) + if "from_bytes" not in cls.__dict__: - cls.to_bytes = to_bytes + @classmethod + def from_bytes(cls, data: bytes) -> cls: + return cls.from_dict(msgspec.msgpack.decode(data)) - if not hasattr(cls, "from_bytes"): + cls.from_bytes = from_bytes - @classmethod - def from_bytes(cls, data: bytes) -> cls: - return cls.from_dict(msgspec.msgpack.decode(data)) + if "to_arrow" not in cls.__dict__: - cls.from_bytes = from_bytes + def to_arrow(self) -> pa.RecordBatch: + return 
pa.RecordBatch.from_pylist([self.to_dict()], schema=cls._schema) - if not hasattr(cls, "to_arrow"): + cls.to_arrow = to_arrow - def to_arrow(self) -> pa.RecordBatch: - return pa.RecordBatch.from_pylist([self.to_dict()], schema=cls._schema) + if "from_arrow" not in cls.__dict__: - cls.to_arrow = to_arrow + @classmethod + def from_arrow(cls, table: pa.Table) -> cls: + return [cls.from_dict(d) for d in table.to_pylist()] - if not hasattr(cls, "from_arrow"): + cls.from_arrow = from_arrow - @classmethod - def from_arrow(cls, table: pa.Table) -> cls: - return [cls.from_dict(d) for d in table.to_pylist()] + if "_schema" not in cls.__dict__: + type_mapping = { + "InstrumentId": pa.string(), + "str": pa.string(), + "bool": pa.bool_(), + "float": pa.float64(), + "int": pa.int64(), + "bytes": pa.binary(), + } - cls.from_arrow = from_arrow + cls._schema = pa.schema( + { + attr: type_mapping[cls.__annotations__[attr].__name__] + for attr in cls.__annotations__ + } + | { + "ts_event": pa.int64(), + "ts_init": pa.int64(), + }, + ) - if not hasattr(cls, "_schema"): - type_mapping = { - "InstrumentId": pa.string(), - "bool": pa.bool_(), - "float": pa.float64(), - "int": pa.int64(), - } + register_serializable_type(cls, cls.to_dict, cls.from_dict) + register_arrow(cls, cls._schema, cls.to_arrow, cls.from_arrow) - cls._schema = pa.schema( - { - attr: type_mapping[cls.__annotations__[attr].__name__] - for attr in cls.__annotations__ - }, - ) + return cls - register_serializable_type(cls, cls.to_dict, cls.from_dict) - register_arrow(cls, cls._schema, cls.to_arrow, cls.from_arrow) + if args and callable(args[0]): + return wrapper(args[0]) - return cls + return wrapper diff --git a/tests/unit_tests/model/test_custom_data.py b/tests/unit_tests/model/test_custom_data.py index 2945b78ea821..30b21371cf70 100644 --- a/tests/unit_tests/model/test_custom_data.py +++ b/tests/unit_tests/model/test_custom_data.py @@ -13,6 +13,7 @@ # limitations under the License. 
# ------------------------------------------------------------------------------------------------- + from nautilus_trader.core.data import Data from nautilus_trader.model.custom import customdataclass from nautilus_trader.model.identifiers import InstrumentId @@ -24,21 +25,21 @@ class GreeksTestData(Data): delta: float = 0.0 def __repr__(self): - return f"{self(type).__name__}(instrument_id={self.instrument_id}, delta={self.delta:.2f}, ts_event={self.ts_event}, ts_init={self._ts_init})" + return f"GreeksTestData(instrument_id={self.instrument_id}, delta={self.delta:.2f}, ts_event={self.ts_event}, ts_init={self._ts_init})" def test_customdata_decorator_properties() -> None: # Arrange, Act - data = GreeksTestData(ts_event=2, ts_init=1) + data = GreeksTestData(ts_event=1, ts_init=2) # Assert - assert data.ts_event == 2 - assert data.ts_init == 1 + assert data.ts_event == 1 + assert data.ts_init == 2 def test_customdata_decorator_dict() -> None: # Arrange - data = GreeksTestData(ts_event=2, ts_init=1) + data = GreeksTestData(1, 2) # Act data_dict = data.to_dict() @@ -47,7 +48,54 @@ def test_customdata_decorator_dict() -> None: assert data_dict == { "instrument_id": "ES.GLBX", "delta": 0.0, - "ts_event": 2, - "ts_init": 1, + "ts_event": 1, + "ts_init": 2, } - # assert GreeksTestData.from_dict(data_dict) == data + + +def test_customdata_decorator_dict_identity() -> None: + # Arrange + data = GreeksTestData( + ts_event=1, + ts_init=2, + instrument_id=InstrumentId.from_str("CL.GLBX"), + delta=1000.0, + ) + + # Act + new_data = GreeksTestData.from_dict(data.to_dict()) + + # Assert + assert new_data == data + + +def test_customdata_decorator_bytes_identity() -> None: + # Arrange + data = GreeksTestData( + ts_event=1, + ts_init=2, + instrument_id=InstrumentId.from_str("CL.GLBX"), + delta=1000.0, + ) + + # Act + new_data = GreeksTestData.from_bytes(data.to_bytes()) + + # Assert + assert new_data == data + + +def test_customdata_decorator_arrow_identity() -> None: + # Arrange 
+ data = GreeksTestData( + ts_event=1, + ts_init=2, + instrument_id=InstrumentId.from_str("CL.GLBX"), + delta=1000.0, + ) + + # Act + new_data = GreeksTestData.from_arrow(data.to_arrow())[0] + + # Assert + assert new_data == data From fbba4dd9d7a9177e87fbc827dd76c87233963b77 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Mon, 5 Aug 2024 20:59:39 +1000 Subject: [PATCH 23/60] Add customdataclass docs --- docs/concepts/advanced/custom_data.md | 28 +++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/concepts/advanced/custom_data.md b/docs/concepts/advanced/custom_data.md index ed307bad736a..5a5c0dde434d 100644 --- a/docs/concepts/advanced/custom_data.md +++ b/docs/concepts/advanced/custom_data.md @@ -229,3 +229,31 @@ catalog = ParquetDataCatalog('.') catalog.write_data([GreeksData()]) ``` + +## Creating a custom data class automatically + +The `@customdataclass` decorator enables the creation of a custom data class with default +implementations for all the features described above. + +Each method can also be overridden if needed. 
Here is an example of its usage: + +```python +from nautilus_trader.model.custom import customdataclass + + +@customdataclass +class GreeksTestData(Data): + instrument_id: InstrumentId = InstrumentId.from_str("ES.GLBX") + delta: float = 0.0 + + def __repr__(self): + return (f"GreeksData(instrument_id={self.instrument_id}, delta={self.delta:.2f}, ts_event={unix_nanos_to_str(self._ts_event)}, ts_init={unix_nanos_to_str(self._ts_init)})") + + +GreeksTestData( + instrument_id=InstrumentId.from_str("CL.GLBX"), + delta=1000.0, + ts_event=1, + ts_init=2, +) +``` From de290ae8661f7c674ea3ad080f05db46dbafc191 Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Tue, 6 Aug 2024 13:29:58 +0200 Subject: [PATCH 24/60] Check short selling Equity without MarginAccount in OrderMatchingEngine (#1831) --- nautilus_core/backtest/src/matching_engine.rs | 147 +++++++++++++++++- nautilus_core/model/src/orders/any.rs | 20 ++- 2 files changed, 158 insertions(+), 9 deletions(-) diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index da495252576b..8ac7c4c2445a 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -29,7 +29,7 @@ use nautilus_model::{ bar::{Bar, BarType}, delta::OrderBookDelta, }, - enums::{AccountType, BookType, LiquiditySide, MarketStatus, OmsType}, + enums::{AccountType, BookType, LiquiditySide, MarketStatus, OmsType, OrderSide, OrderType}, events::order::{ OrderAccepted, OrderCancelRejected, OrderCanceled, OrderEventAny, OrderExpired, OrderFilled, OrderModifyRejected, OrderRejected, OrderTriggered, OrderUpdated, @@ -45,6 +45,7 @@ use nautilus_model::{ trailing_stop_limit::TrailingStopLimitOrder, trailing_stop_market::TrailingStopMarketOrder, }, + position::Position, types::{currency::Currency, money::Money, price::Price, quantity::Quantity}, }; use ustr::Ustr; @@ -311,6 +312,89 @@ impl OrderMatchingEngine { return; } } + + // Get position if exists + let 
position: Option<&Position> = self + .cache + .position_for_order(&order.client_order_id()) + .or_else(|| { + if self.oms_type == OmsType::Netting { + let position_id = PositionId::new( + format!("{}-{}", order.instrument_id(), order.strategy_id()).as_str(), + ) + .unwrap(); + self.cache.position(&position_id) + } else { + None + } + }); + + // Check not shorting an equity without a MARGIN account + if order.order_side() == OrderSide::Sell + && self.account_type != AccountType::Margin + && matches!(self.instrument, InstrumentAny::Equity(_)) + && (position.is_none() + || !order.would_reduce_only(position.unwrap().side, position.unwrap().quantity)) + { + let position_string = position.map_or("None".to_string(), |pos| pos.id.to_string()); + self.generate_order_rejected( + order, + format!( + "Short selling not permitted on a CASH account with position {} and order {}", + position_string, order, + ) + .into(), + ); + return; + } + + match order.order_type() { + OrderType::Market => self.process_market_order(order), + OrderType::Limit => self.process_limit_order(order), + OrderType::MarketToLimit => self.process_market_to_limit_order(order), + OrderType::StopMarket => self.process_stop_market_order(order), + OrderType::StopLimit => self.process_stop_limit_order(order), + OrderType::MarketIfTouched => self.process_market_if_touched_order(order), + OrderType::LimitIfTouched => self.process_limit_if_touched_order(order), + OrderType::TrailingStopMarket => self.process_trailing_stop_market_order(order), + OrderType::TrailingStopLimit => self.process_trailing_stop_limit_order(order), + } + } + + fn process_market_order(&mut self, order: &OrderAny) { + todo!("process_market_order") + } + + fn process_limit_order(&mut self, order: &OrderAny) { + todo!("process_limit_order") + } + + fn process_market_to_limit_order(&mut self, order: &OrderAny) { + todo!("process_market_to_limit_order") + } + + fn process_stop_market_order(&mut self, order: &OrderAny) { + 
todo!("process_stop_market_order") + } + + fn process_stop_limit_order(&mut self, order: &OrderAny) { + todo!("process_stop_limit_order") + } + + fn process_market_if_touched_order(&mut self, order: &OrderAny) { + todo!("process_market_if_touched_order") + } + + fn process_limit_if_touched_order(&mut self, order: &OrderAny) { + todo!("process_limit_if_touched_order") + } + + fn process_trailing_stop_market_order(&mut self, order: &OrderAny) { + todo!("process_trailing_stop_market_order") + } + + fn process_trailing_stop_limit_order(&mut self, order: &OrderAny) { + todo!("process_trailing_stop_limit_order") } // -- ORDER PROCESSING ---------------------------------------------------- @@ -684,7 +768,11 @@ mod tests { enums::{AccountType, BookType, OmsType, OrderSide}, events::order::{OrderEventAny, OrderEventType}, identifiers::AccountId, - instruments::{any::InstrumentAny, stubs::futures_contract_es}, + instruments::{ + any::InstrumentAny, + equity::Equity, + stubs::{futures_contract_es, *}, + }, orders::stubs::TestOrderStubs, types::{price::Price, quantity::Quantity}, }; @@ -740,6 +828,7 @@ mod tests { fn get_order_matching_engine( instrument: InstrumentAny, msgbus: Rc, + account_type: Option, ) -> OrderMatchingEngine { let cache = Rc::new(Cache::default()); let config = OrderMatchingEngineConfig::default(); @@ -748,7 +837,7 @@ mod tests { 1, BookType::L1_MBP, OmsType::Netting, - AccountType::Cash, + account_type.unwrap_or(AccountType::Cash), &ATOMIC_TIME, msgbus, cache, @@ -785,7 +874,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus)); + let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Buy, @@ -835,7 +924,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus)); + let mut engine = 
get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Buy, @@ -871,7 +960,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus)); + let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None); let order = TestOrderStubs::market_order( instrument_es.id(), OrderSide::Buy, @@ -907,7 +996,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus)); + let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None); let limit_order = TestOrderStubs::limit_order( instrument_es.id(), OrderSide::Sell, @@ -945,7 +1034,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus)); + let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None); let stop_order = TestOrderStubs::stop_market_order( instrument_es.id(), OrderSide::Sell, @@ -968,4 +1057,46 @@ mod tests { Ustr::from("Invalid order trigger price precision for order O-19700101-000000-001-001-1, was 5 when ESZ1.GLBX price precision is 2") ); } + + #[rstest] + fn test_order_matching_engine_error_shorting_equity_without_margin_account( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + equity_aapl: Equity, + ) { + let instrument = InstrumentAny::Equity(equity_aapl); + // Register saving message handler to exec engine endpoint + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); + + // Create engine and process order + let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None); + let order = TestOrderStubs::market_order( + instrument.id(), + OrderSide::Sell, + 
Quantity::from("1"), + None, + None, + ); + + engine.process_order(&order, account_id); + + // Get messages and test + let saved_messages = get_order_event_handler_messages(order_event_handler); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from( + "Short selling not permitted on a CASH account with position None and order \ + MarketOrder(SELL 1 AAPL.XNAS @ MARKET GTC, status=INITIALIZED, client_order_id=O-19700101-000000-001-001-1, \ + venue_order_id=None, position_id=None, exec_algorithm_id=None, \ + exec_spawn_id=None, tags=None)") + ); + } } diff --git a/nautilus_core/model/src/orders/any.rs b/nautilus_core/model/src/orders/any.rs index ff532463f914..f18c2ff8ea9c 100644 --- a/nautilus_core/model/src/orders/any.rs +++ b/nautilus_core/model/src/orders/any.rs @@ -31,7 +31,10 @@ use super::{ trailing_stop_market::TrailingStopMarketOrder, }; use crate::{ - enums::{LiquiditySide, OrderSide, OrderSideSpecified, OrderStatus, OrderType, TriggerType}, + enums::{ + LiquiditySide, OrderSide, OrderSideSpecified, OrderStatus, OrderType, PositionSide, + TriggerType, + }, events::order::OrderEventAny, identifiers::{ AccountId, ClientOrderId, ExecAlgorithmId, InstrumentId, PositionId, StrategyId, TraderId, @@ -482,6 +485,21 @@ impl OrderAny { Self::TrailingStopMarket(order) => Some(order.trigger_price), } } + + #[must_use] + pub fn would_reduce_only(&self, side: PositionSide, position_qty: Quantity) -> bool { + match self { + Self::Limit(order) => order.would_reduce_only(side, position_qty), + Self::Market(order) => order.would_reduce_only(side, position_qty), + Self::MarketToLimit(order) => order.would_reduce_only(side, position_qty), + Self::LimitIfTouched(order) => order.would_reduce_only(side, position_qty), + Self::MarketIfTouched(order) => order.would_reduce_only(side, position_qty), + 
Self::StopLimit(order) => order.would_reduce_only(side, position_qty), + Self::StopMarket(order) => order.would_reduce_only(side, position_qty), + Self::TrailingStopLimit(order) => order.would_reduce_only(side, position_qty), + Self::TrailingStopMarket(order) => order.would_reduce_only(side, position_qty), + } + } } impl PartialEq for OrderAny { From fcfc7f9e5f9fe32aebd4fd3790a91ac924b1d28d Mon Sep 17 00:00:00 2001 From: faysou Date: Wed, 7 Aug 2024 07:22:03 +0100 Subject: [PATCH 25/60] Fix creation of instrumend_id folder when writing PyO3 bars in catalog (#1832) --- nautilus_trader/persistence/catalog/parquet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautilus_trader/persistence/catalog/parquet.py b/nautilus_trader/persistence/catalog/parquet.py index ef4a3667e05f..ddf3ad747756 100644 --- a/nautilus_trader/persistence/catalog/parquet.py +++ b/nautilus_trader/persistence/catalog/parquet.py @@ -334,7 +334,7 @@ def key(obj: Any) -> tuple[str, str | None]: name = type(obj).__name__ if isinstance(obj, Instrument): return name, obj.id.value - elif isinstance(obj, Bar): + elif hasattr(obj, "bar_type"): return name, str(obj.bar_type) elif hasattr(obj, "instrument_id"): return name, obj.instrument_id.value From 79ff3bb9028c9210628b758775a6d7a7977655d8 Mon Sep 17 00:00:00 2001 From: faysou Date: Wed, 7 Aug 2024 07:23:36 +0100 Subject: [PATCH 26/60] Fix handling include_types option in StreamingFeatherWriter (#1833) --- nautilus_trader/persistence/writer.py | 5 ++ .../adapters/betfair/test_kit.py | 2 + .../unit_tests/persistence/test_streaming.py | 52 +++++++++++++++++++ 3 files changed, 59 insertions(+) diff --git a/nautilus_trader/persistence/writer.py b/nautilus_trader/persistence/writer.py index 564e9998942b..0119646da09f 100644 --- a/nautilus_trader/persistence/writer.py +++ b/nautilus_trader/persistence/writer.py @@ -209,6 +209,11 @@ def write(self, obj: object) -> None: # noqa: C901 PyCondition.not_none(obj, "obj") cls = obj.__class__ + + # 
Check if an include types filter has been specified + if self.include_types is not None and cls not in self.include_types: + return + if isinstance(obj, CustomData): cls = obj.data_type.type elif isinstance(obj, Instrument): diff --git a/tests/integration_tests/adapters/betfair/test_kit.py b/tests/integration_tests/adapters/betfair/test_kit.py index 6065a2293f7f..e2d3168970d3 100644 --- a/tests/integration_tests/adapters/betfair/test_kit.py +++ b/tests/integration_tests/adapters/betfair/test_kit.py @@ -206,11 +206,13 @@ def streaming_config( catalog_path: str, catalog_fs_protocol: str = "memory", flush_interval_ms: int | None = None, + include_types: list[type] | None = None, ) -> StreamingConfig: return StreamingConfig( catalog_path=catalog_path, fs_protocol=catalog_fs_protocol, flush_interval_ms=flush_interval_ms, + include_types=include_types, ) @staticmethod diff --git a/tests/unit_tests/persistence/test_streaming.py b/tests/unit_tests/persistence/test_streaming.py index 3bed56ecbe57..794cc090aaac 100644 --- a/tests/unit_tests/persistence/test_streaming.py +++ b/tests/unit_tests/persistence/test_streaming.py @@ -139,6 +139,58 @@ def test_feather_writer_custom_data( result = Counter([r.__class__.__name__ for r in result]) # type: ignore assert result["NewsEventData"] == 86985 # type: ignore + def test_feather_writer_include_types( + self, + catalog_betfair: ParquetDataCatalog, + ) -> None: + # Arrange + self.catalog = catalog_betfair + TestPersistenceStubs.setup_news_event_persistence() + + # Load news events into catalog + news_events = TestPersistenceStubs.news_events() + self.catalog.write_data(news_events) + + data_config = BacktestDataConfig( + catalog_path=self.catalog.path, + catalog_fs_protocol="file", + data_cls=NewsEventData.fully_qualified_name(), + client_id="NewsClient", + ) + + # Add some arbitrary instrument data to appease BacktestEngine + instrument_data_config = BacktestDataConfig( + catalog_path=self.catalog.path, + catalog_fs_protocol="file", 
+ data_cls=InstrumentStatus.fully_qualified_name(), + ) + + streaming = BetfairTestStubs.streaming_config( + catalog_path=self.catalog.path, + catalog_fs_protocol="file", + include_types=[NewsEventData], + ) + + run_config = BacktestRunConfig( + engine=BacktestEngineConfig(streaming=streaming), + data=[data_config, instrument_data_config], + venues=[BetfairTestStubs.betfair_venue_config(book_type="L1_MBP")], + ) + + # Act + node = BacktestNode(configs=[run_config]) + r = node.run() + + # Assert + result = self.catalog.read_backtest( + instance_id=r[0].instance_id, + raise_on_failed_deserialize=True, + ) + + result = Counter([r.__class__.__name__ for r in result]) # type: ignore + assert result["NewsEventData"] == 86985 # type: ignore + assert len(result) == 1 + def test_feather_writer_signal_data( self, catalog_betfair: ParquetDataCatalog, From 44f734533ff0bce1e10c382e31f29fb9820315f2 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 16:44:18 +1000 Subject: [PATCH 27/60] Update dependencies --- nautilus_core/Cargo.lock | 93 +++-- nautilus_core/Cargo.toml | 2 +- nautilus_core/common/Cargo.toml | 2 +- nautilus_core/model/Cargo.toml | 2 +- poetry.lock | 611 +++++++++++++++++--------------- pyproject.toml | 12 +- 6 files changed, 401 insertions(+), 321 deletions(-) diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 5332abeac217..7dc5e92d36dc 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -2138,9 +2138,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956" +checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" dependencies = [ "bytes", "futures-channel", @@ -2173,7 +2173,7 @@ dependencies = [ "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows-core", + "windows-core 0.52.0", ] [[package]] @@ -3039,9 
+3039,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.2" +version = "0.36.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f203fa8daa7bb185f760ae12bd8e097f63d17041dcdcaf675ac54cdf863170e" +checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" dependencies = [ "memchr", ] @@ -3156,9 +3156,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "papergrid" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ad43c07024ef767f9160710b3a6773976194758c7919b17e63b863db0bdf7fb" +checksum = "c7419ad52a7de9b60d33e11085a0fe3df1fbd5926aa3f93d3dd53afbc9e86725" dependencies = [ "bytecount", "fnv", @@ -4067,9 +4067,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" [[package]] name = "rustls-webpki" @@ -4698,15 +4698,14 @@ checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" [[package]] name = "sysinfo" -version = "0.30.13" +version = "0.31.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3" +checksum = "d4115055da5f572fff541dd0c4e61b0262977f453cc9fe04be83aba25a89bdab" dependencies = [ - "cfg-if", "core-foundation-sys", "libc", + "memchr", "ntapi", - "once_cell", "rayon", "windows", ] @@ -4734,20 +4733,19 @@ dependencies = [ [[package]] name = "tabled" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c998b0c8b921495196a48aabaf1901ff28be0760136e31604f7967b0792050e" +checksum = "77c9303ee60b9bedf722012ea29ae3711ba13a67c9b9ae28993838b63057cb1b" 
dependencies = [ "papergrid", "tabled_derive", - "unicode-width", ] [[package]] name = "tabled_derive" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c138f99377e5d653a371cdad263615634cfc8467685dfe8e73e2b8e98f44b17" +checksum = "bf0fb8bfdc709786c154e24a66777493fb63ae97e3036d914c8666774c477069" dependencies = [ "heck 0.4.1", "proc-macro-error", @@ -4770,15 +4768,15 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" dependencies = [ "cfg-if", "fastrand", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5258,9 +5256,9 @@ checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode_categories" @@ -5521,11 +5519,11 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.52.0" +version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" dependencies = [ - "windows-core", + "windows-core 0.57.0", "windows-targets 0.52.6", ] @@ -5538,6 +5536,49 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-core" +version = "0.57.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-implement" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", +] + +[[package]] +name = "windows-interface" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", +] + +[[package]] +name = "windows-result" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.48.0" diff --git a/nautilus_core/Cargo.toml b/nautilus_core/Cargo.toml index 47de484a7b43..c36ef95dbd05 100644 --- a/nautilus_core/Cargo.toml +++ b/nautilus_core/Cargo.toml @@ -59,7 +59,7 @@ float-cmp = "0.9.0" iai = "0.1.1" pretty_assertions = "1.4.0" rstest = "0.21.0" -tempfile = "3.11.0" +tempfile = "3.12.0" # build-dependencies cbindgen = "0.26.0" diff --git a/nautilus_core/common/Cargo.toml b/nautilus_core/common/Cargo.toml index 8186fc4e196a..453efc32ac27 100644 --- a/nautilus_core/common/Cargo.toml +++ b/nautilus_core/common/Cargo.toml @@ -27,7 +27,7 @@ rust_decimal_macros = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } strum = { workspace = true } -sysinfo = "0.30.13" +sysinfo = "0.31.2" tokio = { workspace = true } # Disable default feature "tracing-log" since it interferes with custom logging tracing-subscriber = { version = "0.3.18", 
default-features = false, features = ["smallvec", "fmt", "ansi", "std", "env-filter"] } diff --git a/nautilus_core/model/Cargo.toml b/nautilus_core/model/Cargo.toml index e127c11c99a2..ce969df7fb0f 100644 --- a/nautilus_core/model/Cargo.toml +++ b/nautilus_core/model/Cargo.toml @@ -28,7 +28,7 @@ thiserror = { workspace = true } thousands = { workspace = true } ustr = { workspace = true } evalexpr = "11.3.0" -tabled = "0.15.0" +tabled = "0.16.0" [dev-dependencies] criterion = { workspace = true } diff --git a/poetry.lock b/poetry.lock index 37d961a98cba..60b1a9c5e040 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.0" +version = "3.10.1" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:68ab608118e212f56feef44d4785aa90b713042da301f26338f36497b481cd79"}, - {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:64a117c16273ca9f18670f33fc7fd9604b9f46ddb453ce948262889a6be72868"}, - {file = "aiohttp-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54076a25f32305e585a3abae1f0ad10646bec539e0e5ebcc62b54ee4982ec29f"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71c76685773444d90ae83874433505ed800e1706c391fdf9e57cc7857611e2f4"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdda86ab376f9b3095a1079a16fbe44acb9ddde349634f1c9909d13631ff3bcf"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6dcd1d21da5ae1416f69aa03e883a51e84b6c803b8618cbab341ac89a85b9e"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ef0135d7ab7fb0284342fbbf8e8ddf73b7fee8ecc55f5c3a3d0a6b765e6d8b"}, - {file = 
"aiohttp-3.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccab9381f38c669bb9254d848f3b41a3284193b3e274a34687822f98412097e9"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:947da3aee057010bc750b7b4bb65cbd01b0bdb7c4e1cf278489a1d4a1e9596b3"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5268b35fee7eb754fb5b3d0f16a84a2e9ed21306f5377f3818596214ad2d7714"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ff25d988fd6ce433b5c393094a5ca50df568bdccf90a8b340900e24e0d5fb45c"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:594b4b4f1dfe8378b4a0342576dc87a930c960641159f5ae83843834016dbd59"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c8820dad615cd2f296ed3fdea8402b12663ac9e5ea2aafc90ef5141eb10b50b8"}, - {file = "aiohttp-3.10.0-cp310-cp310-win32.whl", hash = "sha256:ab1d870403817c9a0486ca56ccbc0ebaf85d992277d48777faa5a95e40e5bcca"}, - {file = "aiohttp-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:563705a94ea3af43467167f3a21c665f3b847b2a0ae5544fa9e18df686a660da"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13679e11937d3f37600860de1f848e2e062e2b396d3aa79b38c89f9c8ab7e791"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c66a1aadafbc0bd7d648cb7fcb3860ec9beb1b436ce3357036a4d9284fcef9a"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7e3545b06aae925f90f06402e05cfb9c62c6409ce57041932163b09c48daad6"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:effafe5144aa32f0388e8f99b1b2692cf094ea2f6b7ceca384b54338b77b1f50"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a04f2c8d41821a2507b49b2694c40495a295b013afb0cc7355b337980b47c546"}, - {file = 
"aiohttp-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dbfac556219d884d50edc6e1952a93545c2786193f00f5521ec0d9d464040ab"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a65472256c5232681968deeea3cd5453aa091c44e8db09f22f1a1491d422c2d9"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941366a554e566efdd3f042e17a9e461a36202469e5fd2aee66fe3efe6412aef"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:927b4aca6340301e7d8bb05278d0b6585b8633ea852b7022d604a5df920486bf"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:34adb8412e736a5d0df6d1fccdf71599dfb07a63add241a94a189b6364e997f1"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:43c60d9b332a01ee985f080f639f3e56abcfb95ec1320013c94083c3b6a2e143"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3f49edf7c5cd2987634116e1b6a0ee2438fca17f7c4ee480ff41decb76cf6158"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9784246431eaf9d651b3cc06f9c64f9a9f57299f4971c5ea778fa0b81074ef13"}, - {file = "aiohttp-3.10.0-cp311-cp311-win32.whl", hash = "sha256:bec91402df78b897a47b66b9c071f48051cea68d853d8bc1d4404896c6de41ae"}, - {file = "aiohttp-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:25a9924343bf91b0c5082cae32cfc5a1f8787ac0433966319ec07b0ed4570722"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:21dab4a704c68dc7bc2a1219a4027158e8968e2079f1444eda2ba88bc9f2895f"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:872c0dcaccebd5733d535868fe2356aa6939f5827dcea7a8b9355bb2eff6f56e"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f381424dbce313bb5a666a215e7a9dcebbc533e9a2c467a1f0c95279d24d1fa7"}, - {file = 
"aiohttp-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca48e9f092a417c6669ee8d3a19d40b3c66dde1a2ae0d57e66c34812819b671"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbe2f6d0466f5c59c7258e0745c20d74806a1385fbb7963e5bbe2309a11cc69b"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03799a95402a7ed62671c4465e1eae51d749d5439dbc49edb6eee52ea165c50b"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5549c71c35b5f057a4eebcc538c41299826f7813f28880722b60e41c861a57ec"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6fa7a42b78d8698491dc4ad388169de54cca551aa9900f750547372de396277"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:77bbf0a2f6fefac6c0db1792c234f577d80299a33ce7125467439097cf869198"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:34eaf5cfcc979846d73571b1a4be22cad5e029d55cdbe77cdc7545caa4dcb925"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4f1de31a585344a106db43a9c3af2e15bb82e053618ff759f1fdd31d82da38eb"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3a1ea61d96146e9b9e5597069466e2e4d9e01e09381c5dd51659f890d5e29e7"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:73c01201219eb039a828bb58dcc13112eec2fed6eea718356316cd552df26e04"}, - {file = "aiohttp-3.10.0-cp312-cp312-win32.whl", hash = "sha256:33e915971eee6d2056d15470a1214e4e0f72b6aad10225548a7ab4c4f54e2db7"}, - {file = "aiohttp-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2dc75da06c35a7b47a88ceadbf993a53d77d66423c2a78de8c6f9fb41ec35687"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:f1bc4d68b83966012813598fe39b35b4e6019b69d29385cf7ec1cb08e1ff829b"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b8b31c057a0b7bb822a159c490af05cb11b8069097f3236746a78315998afa"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10f0d7894ddc6ff8f369e3fdc082ef1f940dc1f5b9003cd40945d24845477220"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72de8ffba4a27e3c6e83e58a379fc4fe5548f69f9b541fde895afb9be8c31658"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd36d0f0afc2bd84f007cedd2d9a449c3cf04af471853a25eb71f28bc2e1a119"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f64d503c661864866c09806ac360b95457f872d639ca61719115a9f389b2ec90"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31616121369bc823791056c632f544c6c8f8d1ceecffd8bf3f72ef621eaabf49"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f76c12abb88b7ee64b3f9ae72f0644af49ff139067b5add142836dab405d60d4"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6c99eef30a7e98144bcf44d615bc0f445b3a3730495fcc16124cb61117e1f81e"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:39e7ec718e7a1971a5d98357e3e8c0529477d45c711d32cd91999dc8d8404e1e"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1cef548ee4e84264b78879de0c754bbe223193c6313beb242ce862f82eab184"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f98f036eab11d2f90cdd01b9d1410de9d7eb520d070debeb2edadf158b758431"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc4376ff537f7d2c1e98f97f6d548e99e5d96078b0333c1d3177c11467b972de"}, - {file = 
"aiohttp-3.10.0-cp38-cp38-win32.whl", hash = "sha256:ebedc51ee6d39f9ea5e26e255fd56a7f4e79a56e77d960f9bae75ef4f95ed57f"}, - {file = "aiohttp-3.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:aad87626f31a85fd4af02ba7fd6cc424b39d4bff5c8677e612882649da572e47"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1dc95c5e2a5e60095f1bb51822e3b504e6a7430c9b44bff2120c29bb876c5202"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c83977f7b6f4f4a96fab500f5a76d355f19f42675224a3002d375b3fb309174"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8cedc48d36652dd3ac40e5c7c139d528202393e341a5e3475acedb5e8d5c4c75"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b099fbb823efed3c1d736f343ac60d66531b13680ee9b2669e368280f41c2b8"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d583755ddb9c97a2da1322f17fc7d26792f4e035f472d675e2761c766f94c2ff"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a03a4407bdb9ae815f0d5a19df482b17df530cf7bf9c78771aa1c713c37ff1f"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb6e65f6ea7caa0188e36bebe9e72b259d3d525634758c91209afb5a6cbcba7"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6612c6ed3147a4a2d6463454b94b877566b38215665be4c729cd8b7bdce15b4"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b0c0148d2a69b82ffe650c2ce235b431d49a90bde7dd2629bcb40314957acf6"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0d85a173b4dbbaaad1900e197181ea0fafa617ca6656663f629a8a372fdc7d06"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:12c43dace645023583f3dd2337dfc3aa92c99fb943b64dcf2bc15c7aa0fb4a95"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:33acb0d9bf12cdc80ceec6f5fda83ea7990ce0321c54234d629529ca2c54e33d"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:91e0b76502205484a4d1d6f25f461fa60fe81a7987b90e57f7b941b0753c3ec8"}, - {file = "aiohttp-3.10.0-cp39-cp39-win32.whl", hash = "sha256:1ebd8ed91428ffbe8b33a5bd6f50174e11882d5b8e2fe28670406ab5ee045ede"}, - {file = "aiohttp-3.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:0433795c4a8bafc03deb3e662192250ba5db347c41231b0273380d2f53c9ea0b"}, - {file = "aiohttp-3.10.0.tar.gz", hash = "sha256:e8dd7da2609303e3574c95b0ec9f1fd49647ef29b94701a2862cceae76382e1d"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47b4c2412960e64d97258f40616efddaebcb34ff664c8a972119ed38fac2a62c"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7dbf637f87dd315fa1f36aaed8afa929ee2c607454fb7791e74c88a0d94da59"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8fb76214b5b739ce59e2236a6489d9dc3483649cfd6f563dbf5d8e40dbdd57d"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c577cdcf8f92862363b3d598d971c6a84ed8f0bf824d4cc1ce70c2fb02acb4a"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:777e23609899cb230ad2642b4bdf1008890f84968be78de29099a8a86f10b261"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b07286a1090483799599a2f72f76ac396993da31f6e08efedb59f40876c144fa"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9db600a86414a9a653e3c1c7f6a2f6a1894ab8f83d11505247bd1b90ad57157"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:01c3f1eb280008e51965a8d160a108c333136f4a39d46f516c64d2aa2e6a53f2"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f5dd109a925fee4c9ac3f6a094900461a2712df41745f5d04782ebcbe6479ccb"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8c81ff4afffef9b1186639506d70ea90888218f5ddfff03870e74ec80bb59970"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2a384dfbe8bfebd203b778a30a712886d147c61943675f4719b56725a8bbe803"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b9fb6508893dc31cfcbb8191ef35abd79751db1d6871b3e2caee83959b4d91eb"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:88596384c3bec644a96ae46287bb646d6a23fa6014afe3799156aef42669c6bd"}, + {file = "aiohttp-3.10.1-cp310-cp310-win32.whl", hash = "sha256:68164d43c580c2e8bf8e0eb4960142919d304052ccab92be10250a3a33b53268"}, + {file = "aiohttp-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d6bbe2c90c10382ca96df33b56e2060404a4f0f88673e1e84b44c8952517e5f3"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6979b4f20d3e557a867da9d9227de4c156fcdcb348a5848e3e6190fd7feb972"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03c0c380c83f8a8d4416224aafb88d378376d6f4cadebb56b060688251055cd4"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c2b104e81b3c3deba7e6f5bc1a9a0e9161c380530479970766a6655b8b77c7c"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b023b68c61ab0cd48bd38416b421464a62c381e32b9dc7b4bdfa2905807452a4"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a07c76a82390506ca0eabf57c0540cf5a60c993c442928fe4928472c4c6e5e6"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:41d8dab8c64ded1edf117d2a64f353efa096c52b853ef461aebd49abae979f16"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:615348fab1a9ef7d0960a905e83ad39051ae9cb0d2837da739b5d3a7671e497a"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:256ee6044214ee9d66d531bb374f065ee94e60667d6bbeaa25ca111fc3997158"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d5bb926805022508b7ddeaad957f1fce7a8d77532068d7bdb431056dc630cd"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:028faf71b338f069077af6315ad54281612705d68889f5d914318cbc2aab0d50"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5c12310d153b27aa630750be44e79313acc4e864c421eb7d2bc6fa3429c41bf8"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:de1a91d5faded9054957ed0a9e01b9d632109341942fc123947ced358c5d9009"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c186b270979fb1dee3ababe2d12fb243ed7da08b30abc83ebac3a928a4ddb15"}, + {file = "aiohttp-3.10.1-cp311-cp311-win32.whl", hash = "sha256:4a9ce70f5e00380377aac0e568abd075266ff992be2e271765f7b35d228a990c"}, + {file = "aiohttp-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:a77c79bac8d908d839d32c212aef2354d2246eb9deb3e2cb01ffa83fb7a6ea5d"}, + {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2212296cdb63b092e295c3e4b4b442e7b7eb41e8a30d0f53c16d5962efed395d"}, + {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4dcb127ca3eb0a61205818a606393cbb60d93b7afb9accd2fd1e9081cc533144"}, + {file = "aiohttp-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb8b79a65332e1a426ccb6290ce0409e1dc16b4daac1cc5761e059127fa3d134"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:68cc24f707ed9cb961f6ee04020ca01de2c89b2811f3cf3361dc7c96a14bfbcc"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cb54f5725b4b37af12edf6c9e834df59258c82c15a244daa521a065fbb11717"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d03e948e53b3639ce4d438f3d1d8202898ec6655cadcc09ec99229d4adc2a9"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786299d719eb5d868f161aeec56d589396b053925b7e0ce36e983d30d0a3e55c"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abda4009a30d51d3f06f36bc7411a62b3e647fa6cc935ef667e3e3d3a7dd09b1"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67f7639424c313125213954e93a6229d3a1d386855d70c292a12628f600c7150"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e5a26d7aac4c0d8414a347da162696eea0629fdce939ada6aedf951abb1d745"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:120548d89f14b76a041088b582454d89389370632ee12bf39d919cc5c561d1ca"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f5293726943bdcea24715b121d8c4ae12581441d22623b0e6ab12d07ce85f9c4"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f8605e573ed6c44ec689d94544b2c4bb1390aaa723a8b5a2cc0a5a485987a68"}, + {file = "aiohttp-3.10.1-cp312-cp312-win32.whl", hash = "sha256:e7168782621be4448d90169a60c8b37e9b0926b3b79b6097bc180c0a8a119e73"}, + {file = "aiohttp-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fbf8c0ded367c5c8eaf585f85ca8dd85ff4d5b73fb8fe1e6ac9e1b5e62e11f7"}, + {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:54b7f4a20d7cc6bfa4438abbde069d417bb7a119f870975f78a2b99890226d55"}, + {file = 
"aiohttp-3.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fa643ca990323db68911b92f3f7a0ca9ae300ae340d0235de87c523601e58d9"}, + {file = "aiohttp-3.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8311d0d690487359fe2247ec5d2cac9946e70d50dced8c01ce9e72341c21151"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222821c60b8f6a64c5908cb43d69c0ee978a1188f6a8433d4757d39231b42cdb"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7b55d9ede66af7feb6de87ff277e0ccf6d51c7db74cc39337fe3a0e31b5872d"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a95151a5567b3b00368e99e9c5334a919514f60888a6b6d2054fea5e66e527e"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9e9171d2fe6bfd9d3838a6fe63b1e91b55e0bf726c16edf265536e4eafed19"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a57e73f9523e980f6101dc9a83adcd7ac0006ea8bf7937ca3870391c7bb4f8ff"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0df51a3d70a2bfbb9c921619f68d6d02591f24f10e9c76de6f3388c89ed01de6"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b0de63ff0307eac3961b4af74382d30220d4813f36b7aaaf57f063a1243b4214"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8db9b749f589b5af8e4993623dbda6716b2b7a5fcb0fa2277bf3ce4b278c7059"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6b14c19172eb53b63931d3e62a9749d6519f7c121149493e6eefca055fcdb352"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cd57ad998e3038aa87c38fe85c99ed728001bf5dde8eca121cadee06ee3f637"}, + {file = "aiohttp-3.10.1-cp38-cp38-win32.whl", hash = 
"sha256:df31641e3f02b77eb3c5fb63c0508bee0fc067cf153da0e002ebbb0db0b6d91a"}, + {file = "aiohttp-3.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:93094eba50bc2ad4c40ff4997ead1fdcd41536116f2e7d6cfec9596a8ecb3615"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:440954ddc6b77257e67170d57b1026aa9545275c33312357472504eef7b4cc0b"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9f8beed277488a52ee2b459b23c4135e54d6a819eaba2e120e57311015b58e9"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8a8221a63602008550022aa3a4152ca357e1dde7ab3dd1da7e1925050b56863"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a702bd3663b5cbf3916e84bf332400d24cdb18399f0877ca6b313ce6c08bfb43"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1988b370536eb14f0ce7f3a4a5b422ab64c4e255b3f5d7752c5f583dc8c967fc"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ccf1f0a304352c891d124ac1a9dea59b14b2abed1704aaa7689fc90ef9c5be1"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3ea6ef2a83edad84bbdb5d96e22f587b67c68922cd7b6f9d8f24865e655bcf"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b47c125ab07f0831803b88aeb12b04c564d5f07a1c1a225d4eb4d2f26e8b5e"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21778552ef3d44aac3278cc6f6d13a6423504fa5f09f2df34bfe489ed9ded7f5"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bde0693073fd5e542e46ea100aa6c1a5d36282dbdbad85b1c3365d5421490a92"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bf66149bb348d8e713f3a8e0b4f5b952094c2948c408e1cfef03b49e86745d60"}, + {file = 
"aiohttp-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:587237571a85716d6f71f60d103416c9df7d5acb55d96d3d3ced65f39bff9c0c"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bfe33cba6e127d0b5b417623c9aa621f0a69f304742acdca929a9fdab4593693"}, + {file = "aiohttp-3.10.1-cp39-cp39-win32.whl", hash = "sha256:9fbff00646cf8211b330690eb2fd64b23e1ce5b63a342436c1d1d6951d53d8dd"}, + {file = "aiohttp-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:5951c328f9ac42d7bce7a6ded535879bc9ae13032818d036749631fa27777905"}, + {file = "aiohttp-3.10.1.tar.gz", hash = "sha256:8b0d058e4e425d3b45e8ec70d49b402f4d6b21041e674798b1f91ba027c73f28"}, ] [package.dependencies] @@ -146,13 +146,13 @@ files = [ [[package]] name = "attrs" -version = "24.1.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"}, - {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] @@ -385,63 +385,83 @@ files = [ [[package]] name = "coverage" -version = "7.6.0" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, - {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, - {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, - {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, - {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, - {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, - {file = 
"coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, - {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, - {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, - {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, - {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, - {file = 
"coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, - {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, - {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, - {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, - {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = 
"coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = 
"coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = 
"coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -452,69 +472,77 @@ toml = ["tomli"] [[package]] name = "cython" -version = "3.0.10" +version = "3.0.11" description = "The Cython compiler for writing C extensions in the Python language." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Cython-3.0.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e876272548d73583e90babda94c1299537006cad7a34e515a06c51b41f8657aa"}, - {file = "Cython-3.0.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adc377aa33c3309191e617bf675fdbb51ca727acb9dc1aa23fc698d8121f7e23"}, - {file = "Cython-3.0.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:401aba1869a57aba2922ccb656a6320447e55ace42709b504c2f8e8b166f46e1"}, - {file = "Cython-3.0.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:541fbe725d6534a90b93f8c577eb70924d664b227a4631b90a6e0506d1469591"}, - {file = "Cython-3.0.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:86998b01f6a6d48398df8467292c7637e57f7e3a2ca68655367f13f66fed7734"}, - {file = "Cython-3.0.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d092c0ddba7e9e530a5c5be4ac06db8360258acc27675d1fc86294a5dc8994c5"}, - {file = "Cython-3.0.10-cp310-cp310-win32.whl", hash = "sha256:3cffb666e649dba23810732497442fb339ee67ba4e0be1f0579991e83fcc2436"}, - {file = "Cython-3.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:9ea31184c7b3a728ef1f81fccb161d8948c05aa86c79f63b74fb6f3ddec860ec"}, - {file = "Cython-3.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:051069638abfb076900b0c2bcb6facf545655b3f429e80dd14365192074af5a4"}, - {file = "Cython-3.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:712760879600907189c7d0d346851525545484e13cd8b787e94bfd293da8ccf0"}, - {file = "Cython-3.0.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38d40fa1324ac47c04483d151f5e092406a147eac88a18aec789cf01c089c3f2"}, - {file = "Cython-3.0.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5bd49a3a9fdff65446a3e1c2bfc0ec85c6ce4c3cad27cd4ad7ba150a62b7fb59"}, - {file = "Cython-3.0.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e8df79b596633b8295eaa48b1157d796775c2bb078f32267d32f3001b687f2fd"}, - {file = "Cython-3.0.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bcc9795990e525c192bc5c0775e441d7d56d7a7d02210451e9e13c0448dba51b"}, - {file = "Cython-3.0.10-cp311-cp311-win32.whl", hash = "sha256:09f2000041db482cad3bfce94e1fa3a4c82b0e57390a164c02566cbbda8c4f12"}, - {file = "Cython-3.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:3919a55ec9b6c7db6f68a004c21c05ed540c40dbe459ced5d801d5a1f326a053"}, - {file = "Cython-3.0.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f2864ab5fcd27a346f0b50f901ebeb8f60b25a60a575ccfd982e7f3e9674914"}, - {file = "Cython-3.0.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:407840c56385b9c085826fe300213e0e76ba15d1d47daf4b58569078ecb94446"}, - {file = "Cython-3.0.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a036d00caa73550a3a976432ef21c1e3fa12637e1616aab32caded35331ae96"}, - {file = "Cython-3.0.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc6a0e7e23a96dec3f3c9d39690d4281beabd5297855140d0d30855f950275e"}, - {file = "Cython-3.0.10-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5e14a8c6a8157d2b0cdc2e8e3444905d20a0e78e19d2a097e89fb8b04b51f6b"}, - {file = "Cython-3.0.10-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f8a2b8fa0fd8358bccb5f3304be563c4750aae175100463d212d5ea0ec74cbe0"}, - {file = "Cython-3.0.10-cp312-cp312-win32.whl", hash = "sha256:2d29e617fd23cf4b83afe8f93f2966566c9f565918ad1e86a4502fe825cc0a79"}, - {file = "Cython-3.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:6c5af936940a38c300977b81598d9c0901158f220a58c177820e17e1774f1cf1"}, - {file = "Cython-3.0.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:5f465443917d5c0f69825fca3b52b64c74ac3de0143b1fff6db8ba5b48c9fb4a"}, - {file = "Cython-3.0.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fadb84193c25641973666e583df8df4e27c52cdc05ddce7c6f6510d690ba34a"}, - {file = "Cython-3.0.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fa9e7786083b6aa61594c16979d621b62e61fcd9c2edd4761641b95c7fb34b2"}, - {file = "Cython-3.0.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4780d0f98ce28191c4d841c4358b5d5e79d96520650910cd59904123821c52d"}, - {file = "Cython-3.0.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:32fbad02d1189be75eb96456d9c73f5548078e5338d8fa153ecb0115b6ee279f"}, - {file = "Cython-3.0.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:90e2f514fc753b55245351305a399463103ec18666150bb1c36779b9862388e9"}, - {file = "Cython-3.0.10-cp36-cp36m-win32.whl", hash = "sha256:a9c976e9ec429539a4367cb4b24d15a1e46b925976f4341143f49f5f161171f5"}, - {file = "Cython-3.0.10-cp36-cp36m-win_amd64.whl", hash = "sha256:a9bb402674788a7f4061aeef8057632ec440123e74ed0fb425308a59afdfa10e"}, - {file = "Cython-3.0.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:206e803598010ecc3813db8748ed685f7beeca6c413f982df9f8a505fce56563"}, - {file = "Cython-3.0.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15b6d397f4ee5ad54e373589522af37935a32863f1b23fa8c6922adf833e28e2"}, - {file = "Cython-3.0.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181144c2f893ed8e6a994d43d0b96300bc99873f21e3b7334ca26c61c37b680"}, - {file = "Cython-3.0.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74b700d6a793113d03fb54b63bdbadba6365379424bac7c0470605672769260"}, - {file = "Cython-3.0.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:076e9fd4e0ca33c5fa00a7479180dbfb62f17fe928e2909f82da814536e96d2b"}, 
- {file = "Cython-3.0.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:269f06e6961e8591d56e30b46e1a51b6ccb42cab04c29fa3b30d3e8723485fb4"}, - {file = "Cython-3.0.10-cp37-cp37m-win32.whl", hash = "sha256:d4e83a8ceff7af60064da4ccfce0ac82372544dd5392f1b350c34f1b04d0fae6"}, - {file = "Cython-3.0.10-cp37-cp37m-win_amd64.whl", hash = "sha256:40fac59c3a7fbcd9c25aea64c342c890a5e2270ce64a1525e840807800167799"}, - {file = "Cython-3.0.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f43a58bf2434870d2fc42ac2e9ff8138c9e00c6251468de279d93fa279e9ba3b"}, - {file = "Cython-3.0.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e9a885ec63d3955a08cefc4eec39fefa9fe14989c6e5e2382bd4aeb6bdb9bc3"}, - {file = "Cython-3.0.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acfbe0fff364d54906058fc61f2393f38cd7fa07d344d80923937b87e339adcf"}, - {file = "Cython-3.0.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8adcde00a8a88fab27509b558cd8c2959ab0c70c65d3814cfea8c68b83fa6dcd"}, - {file = "Cython-3.0.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2c9c1e3e78909488f3b16fabae02308423fa6369ed96ab1e250807d344cfffd7"}, - {file = "Cython-3.0.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc6e0faf5b57523b073f0cdefadcaef3a51235d519a0594865925cadb3aeadf0"}, - {file = "Cython-3.0.10-cp38-cp38-win32.whl", hash = "sha256:35f6ede7c74024ed1982832ae61c9fad7cf60cc3f5b8c6a63bb34e38bc291936"}, - {file = "Cython-3.0.10-cp38-cp38-win_amd64.whl", hash = "sha256:950c0c7b770d2a7cec74fb6f5ccc321d0b51d151f48c075c0d0db635a60ba1b5"}, - {file = "Cython-3.0.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:077b61ee789e48700e25d4a16daa4258b8e65167136e457174df400cf9b4feab"}, - {file = "Cython-3.0.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f1f8bba9d8f37c0cffc934792b4ac7c42d0891077127c11deebe9fa0a0f7e4"}, - {file = 
"Cython-3.0.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:651a15a8534ebfb9b58cb0b87c269c70984b6f9c88bfe65e4f635f0e3f07dfcd"}, - {file = "Cython-3.0.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d10fc9aa82e5e53a0b7fd118f9771199cddac8feb4a6d8350b7d4109085aa775"}, - {file = "Cython-3.0.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f610964ab252a83e573a427e28b103e2f1dd3c23bee54f32319f9e73c3c5499"}, - {file = "Cython-3.0.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c9c4c4f3ab8f8c02817b0e16e8fa7b8cc880f76e9b63fe9c010e60c1a6c2b13"}, - {file = "Cython-3.0.10-cp39-cp39-win32.whl", hash = "sha256:0bac3ccdd4e03924028220c62ae3529e17efa8ca7e9df9330de95de02f582b26"}, - {file = "Cython-3.0.10-cp39-cp39-win_amd64.whl", hash = "sha256:81f356c1c8c0885b8435bfc468025f545c5d764aa9c75ab662616dd1193c331e"}, - {file = "Cython-3.0.10-py2.py3-none-any.whl", hash = "sha256:fcbb679c0b43514d591577fd0d20021c55c240ca9ccafbdb82d3fb95e5edfee2"}, - {file = "Cython-3.0.10.tar.gz", hash = "sha256:dcc96739331fb854dcf503f94607576cfe8488066c61ca50dfd55836f132de99"}, + {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008"}, + {file 
= "Cython-3.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891"}, + {file = "Cython-3.0.11-cp310-cp310-win32.whl", hash = "sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a"}, + {file = "Cython-3.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38"}, + {file = "Cython-3.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed"}, + {file = "Cython-3.0.11-cp311-cp311-win32.whl", hash = "sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1"}, + {file = "Cython-3.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3"}, + {file = "Cython-3.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e"}, + {file = 
"Cython-3.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579"}, + {file = "Cython-3.0.11-cp312-cp312-win32.whl", hash = "sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00"}, + {file = "Cython-3.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8"}, + {file = "Cython-3.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12"}, + {file = "Cython-3.0.11-cp313-cp313-win32.whl", hash = 
"sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48"}, + {file = "Cython-3.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31"}, + {file = "Cython-3.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3"}, + {file = "Cython-3.0.11-cp36-cp36m-win32.whl", hash = "sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4"}, + {file = "Cython-3.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c"}, + {file = "Cython-3.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619"}, + {file = 
"Cython-3.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0"}, + {file = "Cython-3.0.11-cp37-cp37m-win32.whl", hash = "sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d"}, + {file = "Cython-3.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057"}, + {file = "Cython-3.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162"}, + {file = "Cython-3.0.11-cp38-cp38-win32.whl", hash = "sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2"}, + {file = "Cython-3.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2"}, + {file = 
"Cython-3.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7"}, + {file = "Cython-3.0.11-cp39-cp39-win32.whl", hash = "sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059"}, + {file = "Cython-3.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c"}, + {file = "Cython-3.0.11-py2.py3-none-any.whl", hash = "sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d"}, + {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, ] [[package]] @@ -1221,47 +1249,56 @@ files = [ [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.1" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = 
"numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"}, + {file = 
"numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"}, + {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"}, + {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"}, + {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"}, + {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"}, + {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"}, + {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc085b28d62ff4009364e7ca34b80a9a080cbd97c2c0630bb5f7f770dae9414"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fae4ebbf95a179c1156fab0b142b74e4ba4204c87bde8d3d8b6f9c34c5825ef"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:72dc22e9ec8f6eaa206deb1b1355eb2e253899d7347f5e2fae5f0af613741d06"}, + {file = 
"numpy-2.0.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:ec87f5f8aca726117a1c9b7083e7656a9d0d606eec7299cc067bb83d26f16e0c"}, + {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f682ea61a88479d9498bf2091fdcd722b090724b08b31d63e022adc063bad59"}, + {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8efc84f01c1cd7e34b3fb310183e72fcdf55293ee736d679b6d35b35d80bba26"}, + {file = "numpy-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3fdabe3e2a52bc4eff8dc7a5044342f8bd9f11ef0934fcd3289a788c0eb10018"}, + {file = "numpy-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:24a0e1befbfa14615b49ba9659d3d8818a0f4d8a1c5822af8696706fbda7310c"}, + {file = "numpy-2.0.1-cp39-cp39-win32.whl", hash = "sha256:f9cf5ea551aec449206954b075db819f52adc1638d46a6738253a712d553c7b4"}, + {file = "numpy-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e9e81fa9017eaa416c056e5d9e71be93d05e2c3c2ab308d23307a8bc4443c368"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61728fba1e464f789b11deb78a57805c70b2ed02343560456190d0501ba37b0f"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:12f5d865d60fb9734e60a60f1d5afa6d962d8d4467c120a1c0cda6eb2964437d"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacf3291e263d5a67d8c1a581a8ebbcfd6447204ef58828caf69a5e3e8c75990"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2c3a346ae20cfd80b6cfd3e60dc179963ef2ea58da5ec074fd3d9e7a1e7ba97f"}, + {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, ] [[package]] @@ -1721,62 +1758,64 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = 
"PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file 
= "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = 
"sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -2070,13 +2109,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - 
{file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -2362,4 +2401,4 @@ ib = ["async-timeout", "defusedxml", "nautilus_ibapi"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "1e34ff64907a35e48bd8a2238a323679b11f0b7a10d92ab4e11921284b1dbef9" +content-hash = "87819ed24c0a0158744979441eacb1e39e8847d4072c4f7aa71e3a2685e6db46" diff --git a/pyproject.toml b/pyproject.toml index c8239659162b..5126fec760d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,8 +39,8 @@ include = [ requires = [ "setuptools", "poetry-core>=1.9.0", - "numpy==1.26.4", - "Cython==3.0.10", + "numpy>=2.0.1", + "Cython==3.0.11", "toml>=0.10.2", ] build-backend = "poetry.core.masonry.api" @@ -51,8 +51,8 @@ generate-setup-file = false [tool.poetry.dependencies] python = ">=3.10,<3.13" -cython = "==3.0.10" # Build dependency (pinned for stability) -numpy = "==1.26.4" # Build dependency (pinned below v2 for now) +cython = "==3.0.11" # Build dependency (pinned for stability) +numpy = "^2.0.1" # Build dependency setuptools = ">=72" # Build dependency toml = "^0.10.2" # Build dependency click = "^8.1.7" @@ -61,7 +61,7 @@ msgspec = "^0.18.6" pandas = "^2.2.2" pyarrow = ">=17.0.0" pytz = ">=2024.1.0" -tqdm = "^4.66.4" +tqdm = "^4.66.5" uvloop = {version = "^0.19.0", markers = "sys_platform != 'win32'"} async-timeout = {version = "^4.0.3", optional = true} @@ -93,7 +93,7 @@ types-toml = "^0.10.2" optional = true [tool.poetry.group.test.dependencies] -coverage = "^7.6.0" +coverage = "^7.6.1" pytest = "^7.4.4" pytest-aiohttp = "^1.0.5" pytest-asyncio = "==0.21.1" # Pinned due Cython: cannot set '__pytest_asyncio_scoped_event_loop' attribute of 
immutable type From a831b991d2d614cb9443bdf6db59d9b798d87628 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 16:44:40 +1000 Subject: [PATCH 28/60] Update release notes --- RELEASES.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/RELEASES.md b/RELEASES.md index 928d1b17fc19..18b3ce1892e0 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -3,13 +3,16 @@ Released on TBD (UTC). ### Enhancements -- Added `@customdata` decorator to reduce need for boiler plate implementing custom data types, thanks @faysou +- Added `@customdata` decorator to reduce need for boiler plate implementing custom data types (#1828), thanks @faysou +- Added timeout for HTTP client in Rust (#1835), thanks @davidsblom +- Upgraded Cython to 3.0.11 ### Breaking Changes None ### Fixes -None +- Fixed creation of `instrumend_id` folder when writing PyO3 bars in catalog (#1832), thanks @faysou +- Fixed handling `include_types` option in `StreamingFeatherWriter` (#1833), thanks @faysou --- From 1c35c900185b673fa785849b018b95639c601aa9 Mon Sep 17 00:00:00 2001 From: David Blom Date: Wed, 7 Aug 2024 09:12:34 +0200 Subject: [PATCH 29/60] Add optional timeout for HTTP requests in Rust (#1835) --- nautilus_core/network/src/http.rs | 16 +++++++++++++++- nautilus_core/network/src/python/http.rs | 6 +++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/nautilus_core/network/src/http.rs b/nautilus_core/network/src/http.rs index 4cdef592facb..505bcecdd6f7 100644 --- a/nautilus_core/network/src/http.rs +++ b/nautilus_core/network/src/http.rs @@ -19,6 +19,7 @@ use std::{ collections::{hash_map::DefaultHasher, HashMap}, hash::{Hash, Hasher}, sync::Arc, + time::Duration, }; use bytes::Bytes; @@ -78,6 +79,7 @@ impl InnerHttpClient { url: String, headers: HashMap, body: Option>, + timeout_sec: Option, ) -> Result> { let reqwest_url = Url::parse(url.as_str())?; @@ -87,7 +89,14 @@ impl InnerHttpClient { let _ = header_map.insert(key, header_value.parse().unwrap()); 
} - let request_builder = self.client.request(method, reqwest_url).headers(header_map); + let request_builder = match timeout_sec { + Some(timeout_sec) => self + .client + .request(method, reqwest_url) + .headers(header_map) + .timeout(Duration::new(timeout_sec, 0)), + None => self.client.request(method, reqwest_url).headers(header_map), + }; let request = match body { Some(b) => request_builder.body(b).build()?, @@ -217,6 +226,7 @@ mod tests { format!("{url}/get"), HashMap::new(), None, + None, ) .await .unwrap(); @@ -237,6 +247,7 @@ mod tests { format!("{url}/post"), HashMap::new(), None, + None, ) .await .unwrap(); @@ -270,6 +281,7 @@ mod tests { format!("{url}/post"), HashMap::new(), Some(body_bytes), + None, ) .await .unwrap(); @@ -289,6 +301,7 @@ mod tests { format!("{url}/patch"), HashMap::new(), None, + None, ) .await .unwrap(); @@ -308,6 +321,7 @@ mod tests { format!("{url}/delete"), HashMap::new(), None, + None, ) .await .unwrap(); diff --git a/nautilus_core/network/src/python/http.rs b/nautilus_core/network/src/python/http.rs index ee8cab7a3c85..0cd1e24dc5d8 100644 --- a/nautilus_core/network/src/python/http.rs +++ b/nautilus_core/network/src/python/http.rs @@ -112,6 +112,7 @@ impl HttpClient { headers: Option>, body: Option<&'py PyBytes>, keys: Option>, + timeout_sec: Option, py: Python<'py>, ) -> PyResult> { let headers = headers.unwrap_or_default(); @@ -128,7 +129,10 @@ impl HttpClient { key.await; }) .await; - match client.send_request(method, url, headers, body_vec).await { + match client + .send_request(method, url, headers, body_vec, timeout_sec) + .await + { Ok(res) => Ok(res), Err(e) => Err(PyErr::new::(format!( "Error handling response: {e}" From 2eaa7fc45436b0db6f7c97da94f0d59c01577260 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 17:22:21 +1000 Subject: [PATCH 30/60] Minor cleanups --- nautilus_core/network/benches/test_client.rs | 1 + nautilus_core/network/src/http.rs | 15 ++++++--------- 2 files changed, 7 
insertions(+), 9 deletions(-) diff --git a/nautilus_core/network/benches/test_client.rs b/nautilus_core/network/benches/test_client.rs index 407531ba3f7c..0245d149f734 100644 --- a/nautilus_core/network/benches/test_client.rs +++ b/nautilus_core/network/benches/test_client.rs @@ -32,6 +32,7 @@ async fn main() { "http://127.0.0.1:3000".to_string(), HashMap::new(), None, + None, )); } diff --git a/nautilus_core/network/src/http.rs b/nautilus_core/network/src/http.rs index 505bcecdd6f7..769cf40ad9d3 100644 --- a/nautilus_core/network/src/http.rs +++ b/nautilus_core/network/src/http.rs @@ -79,7 +79,7 @@ impl InnerHttpClient { url: String, headers: HashMap, body: Option>, - timeout_sec: Option, + timeout_secs: Option, ) -> Result> { let reqwest_url = Url::parse(url.as_str())?; @@ -89,14 +89,11 @@ impl InnerHttpClient { let _ = header_map.insert(key, header_value.parse().unwrap()); } - let request_builder = match timeout_sec { - Some(timeout_sec) => self - .client - .request(method, reqwest_url) - .headers(header_map) - .timeout(Duration::new(timeout_sec, 0)), - None => self.client.request(method, reqwest_url).headers(header_map), - }; + let mut request_builder = self.client.request(method, reqwest_url).headers(header_map); + + if let Some(timeout_secs) = timeout_secs { + request_builder = request_builder.timeout(Duration::new(timeout_secs, 0)); + } let request = match body { Some(b) => request_builder.body(b).build()?, From 16a9ddd44b9f7a9e4071e38fb48a5c93b4864e71 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 17:32:49 +1000 Subject: [PATCH 31/60] Fix clippy lints --- nautilus_core/backtest/src/matching_engine.rs | 5 ++--- nautilus_core/common/src/logging/mod.rs | 2 +- nautilus_core/common/src/messages/data.rs | 3 ++- nautilus_core/data/src/client.rs | 1 + nautilus_core/data/src/engine/mod.rs | 20 ++++++++++--------- nautilus_core/data/src/mocks.rs | 2 +- .../tests/test_cache_database_postgres.rs | 4 ++-- 7 files changed, 20 insertions(+), 17 
deletions(-) diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index 8ac7c4c2445a..8f91e80684d0 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -64,7 +64,7 @@ pub struct OrderMatchingEngineConfig { #[allow(clippy::derivable_impls)] impl Default for OrderMatchingEngineConfig { fn default() -> Self { - OrderMatchingEngineConfig { + Self { bar_execution: false, reject_stop_orders: false, support_gtd_orders: false, @@ -340,8 +340,7 @@ impl OrderMatchingEngine { self.generate_order_rejected( order, format!( - "Short selling not permitted on a CASH account with position {} and order {}", - position_string, order, + "Short selling not permitted on a CASH account with position {position_string} and order {order}", ) .into(), ); diff --git a/nautilus_core/common/src/logging/mod.rs b/nautilus_core/common/src/logging/mod.rs index 61897aa08bd9..5e95d4a670ce 100644 --- a/nautilus_core/common/src/logging/mod.rs +++ b/nautilus_core/common/src/logging/mod.rs @@ -107,7 +107,7 @@ pub fn init_tracing() { .with_env_filter(EnvFilter::new(v.clone())) .try_init() .unwrap_or_else(|e| { - tracing::error!("Cannot set tracing subscriber because of error: {e}") + tracing::error!("Cannot set tracing subscriber because of error: {e}"); }); println!("Initialized tracing logs with RUST_LOG={v}"); } diff --git a/nautilus_core/common/src/messages/data.rs b/nautilus_core/common/src/messages/data.rs index 951049fc4cab..fbfb991fd0c8 100644 --- a/nautilus_core/common/src/messages/data.rs +++ b/nautilus_core/common/src/messages/data.rs @@ -78,7 +78,8 @@ pub struct SubscriptionCommand { } impl SubscriptionCommand { - pub fn new( + #[must_use] + pub const fn new( client_id: ClientId, venue: Venue, data_type: DataType, diff --git a/nautilus_core/data/src/client.rs b/nautilus_core/data/src/client.rs index efbfb4841fa8..38de00d5e4c0 100644 --- a/nautilus_core/data/src/client.rs +++ 
b/nautilus_core/data/src/client.rs @@ -177,6 +177,7 @@ impl DerefMut for DataClientAdapter { } impl DataClientAdapter { + #[must_use] pub fn new( client_id: ClientId, venue: Venue, diff --git a/nautilus_core/data/src/engine/mod.rs b/nautilus_core/data/src/engine/mod.rs index 31db4e85e710..caf6a7f50396 100644 --- a/nautilus_core/data/src/engine/mod.rs +++ b/nautilus_core/data/src/engine/mod.rs @@ -278,7 +278,7 @@ impl DataEngine { // TODO: log error if let Some(cmd) = msg.downcast_ref::() { if let Some(client) = self.clients.get_mut(&cmd.client_id) { - client.execute(cmd.clone()) + client.execute(cmd.clone()); } else { log::error!( "Cannot handle command: no client found for {}", @@ -346,7 +346,7 @@ impl DataEngine { _ => {} // Nothing else to handle } - self.msgbus.as_ref().borrow().send_response(resp) + self.msgbus.as_ref().borrow().send_response(resp); } // -- DATA HANDLERS --------------------------------------------------------------------------- @@ -513,6 +513,7 @@ impl DataEngine { } // TODO: Potentially move these +#[must_use] pub fn get_instrument_publish_topic(instrument: &InstrumentAny) -> String { let instrument_id = instrument.id(); format!( @@ -521,6 +522,7 @@ pub fn get_instrument_publish_topic(instrument: &InstrumentAny) -> String { ) } +#[must_use] pub fn get_delta_publish_topic(delta: &OrderBookDelta) -> String { format!( "data.book.delta.{}.{}", @@ -528,6 +530,7 @@ pub fn get_delta_publish_topic(delta: &OrderBookDelta) -> String { ) } +#[must_use] pub fn get_deltas_publish_topic(delta: &OrderBookDeltas) -> String { format!( "data.book.snapshots.{}.{}", @@ -535,6 +538,7 @@ pub fn get_deltas_publish_topic(delta: &OrderBookDeltas) -> String { ) } +#[must_use] pub fn get_depth_publish_topic(depth: &OrderBookDepth10) -> String { format!( "data.book.depth.{}.{}", @@ -542,6 +546,7 @@ pub fn get_depth_publish_topic(depth: &OrderBookDepth10) -> String { ) } +#[must_use] pub fn get_quote_publish_topic(quote: &QuoteTick) -> String { format!( 
"data.quotes.{}.{}", @@ -549,6 +554,7 @@ pub fn get_quote_publish_topic(quote: &QuoteTick) -> String { ) } +#[must_use] pub fn get_trade_publish_topic(trade: &TradeTick) -> String { format!( "data.trades.{}.{}", @@ -556,6 +562,7 @@ pub fn get_trade_publish_topic(trade: &TradeTick) -> String { ) } +#[must_use] pub fn get_bar_publish_topic(bar: &Bar) -> String { format!("data.bars.{}", bar.bar_type) } @@ -571,7 +578,7 @@ impl MessageHandler for SubscriptionCommandHandler { } fn handle(&self, message: &dyn Any) { - self.data_engine.borrow_mut().execute(message) + self.data_engine.borrow_mut().execute(message); } fn handle_response(&self, _resp: DataResponse) {} fn handle_data(&self, _resp: Data) {} @@ -617,12 +624,7 @@ mod tests { let client_id = ClientId::from("SIM"); let venue = Venue::from("SIM"); - let client = Box::new(MockDataClient::new( - cache.clone(), - msgbus.clone(), - client_id, - venue, - )); + let client = Box::new(MockDataClient::new(cache, msgbus.clone(), client_id, venue)); let client = DataClientAdapter::new(client_id, venue, client, Box::new(TestClock::new())); data_engine.borrow_mut().register_client(client, None); diff --git a/nautilus_core/data/src/mocks.rs b/nautilus_core/data/src/mocks.rs index eccf472bdb54..8be1ac8b4a19 100644 --- a/nautilus_core/data/src/mocks.rs +++ b/nautilus_core/data/src/mocks.rs @@ -47,7 +47,7 @@ pub struct MockDataClient { } impl MockDataClient { - pub fn new( + pub const fn new( cache: Rc>, msgbus: Rc>, client_id: ClientId, diff --git a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs index dd9331df91fb..8030e4b215a0 100644 --- a/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs +++ b/nautilus_core/infrastructure/tests/test_cache_database_postgres.rs @@ -271,7 +271,7 @@ mod serial_tests { assert_eq!( client_order_ids .keys() - .cloned() + .copied() .collect::>(), vec![client_order_id_1, client_order_id_2] 
.into_iter() @@ -280,7 +280,7 @@ mod serial_tests { assert_eq!( client_order_ids .values() - .cloned() + .copied() .collect::>(), vec![client_id].into_iter().collect::>() ); From 03165f5b35e3278fef0ac1b1ff4009aa6fc386b2 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 17:58:49 +1000 Subject: [PATCH 32/60] Fix BybitExecutionClient position reports logging --- RELEASES.md | 1 + nautilus_trader/adapters/bybit/execution.py | 49 +++++++++++++-------- 2 files changed, 32 insertions(+), 18 deletions(-) diff --git a/RELEASES.md b/RELEASES.md index 18b3ce1892e0..dd23e9219545 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -13,6 +13,7 @@ None ### Fixes - Fixed creation of `instrumend_id` folder when writing PyO3 bars in catalog (#1832), thanks @faysou - Fixed handling `include_types` option in `StreamingFeatherWriter` (#1833), thanks @faysou +- Fixed `BybitExecutionClient` position reports error handling and logging --- diff --git a/nautilus_trader/adapters/bybit/execution.py b/nautilus_trader/adapters/bybit/execution.py index 53f8925c2fe7..cee01c42fb20 100644 --- a/nautilus_trader/adapters/bybit/execution.py +++ b/nautilus_trader/adapters/bybit/execution.py @@ -241,6 +241,7 @@ async def generate_order_status_reports( ) -> list[OrderStatusReport]: self._log.info("Requesting OrderStatusReports...") reports: list[OrderStatusReport] = [] + try: _symbol = instrument_id.symbol.value if instrument_id is not None else None symbol = BybitSymbol(_symbol) if _symbol is not None else None @@ -275,9 +276,11 @@ async def generate_order_status_reports( self._log.debug(f"Received {report}", LogColor.MAGENTA) except BybitError as e: self._log.error(f"Failed to generate OrderStatusReports: {e}") + len_reports = len(reports) plural = "" if len_reports == 1 else "s" self._log.info(f"Received {len(reports)} OrderStatusReport{plural}") + return reports async def generate_order_status_report( @@ -359,6 +362,7 @@ async def generate_fill_reports( ) -> list[FillReport]: 
self._log.info("Requesting FillReports...") reports: list[FillReport] = [] + try: _symbol = instrument_id.symbol.value if instrument_id is not None else None symbol = BybitSymbol(_symbol) if _symbol is not None else None @@ -384,9 +388,11 @@ async def generate_fill_reports( self._log.debug(f"Received {report}") except BybitError as e: self._log.error(f"Failed to generate FillReports: {e}") + len_reports = len(reports) plural = "" if len_reports == 1 else "s" self._log.info(f"Received {len(reports)} FillReport{plural}") + return reports async def generate_position_status_reports( @@ -398,24 +404,31 @@ async def generate_position_status_reports( self._log.info("Requesting PositionStatusReports...") reports: list[PositionStatusReport] = [] - for product_type in self._product_types: - if product_type == BybitProductType.SPOT: - continue # No positions on spot - positions = await self._http_account.query_position_info(product_type) - for position in positions: - # Uncomment for development - # self._log.info(f"Generating report {position}", LogColor.MAGENTA) - instr: InstrumentId = BybitSymbol( - position.symbol + "-" + product_type.value.upper(), - ).parse_as_nautilus() - position_report = position.parse_to_position_status_report( - account_id=self.account_id, - instrument_id=instr, - report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - self._log.debug(f"Received {position_report}") - reports.append(position_report) + try: + for product_type in self._product_types: + if product_type == BybitProductType.SPOT: + continue # No positions on spot + positions = await self._http_account.query_position_info(product_type) + for position in positions: + # Uncomment for development + self._log.info(f"Generating report {position}", LogColor.MAGENTA) + instr: InstrumentId = BybitSymbol( + position.symbol + "-" + product_type.value.upper(), + ).parse_as_nautilus() + position_report = position.parse_to_position_status_report( + account_id=self.account_id, + 
instrument_id=instr, + report_id=UUID4(), + ts_init=self._clock.timestamp_ns(), + ) + self._log.debug(f"Received {position_report}") + reports.append(position_report) + except BybitError as e: + self._log.error(f"Failed to generate PositionReports: {e}") + + len_reports = len(reports) + plural = "" if len_reports == 1 else "s" + self._log.info(f"Received {len(reports)} PositionReport{plural}") return reports From 4e33b83a3e153a827ef995218d7d6f0014d6cfa1 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 18:08:39 +1000 Subject: [PATCH 33/60] Use cached cargo for GitHub workflow --- .github/workflows/build.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 97242cf6ec48..d507dedca068 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -111,6 +111,19 @@ jobs: path: ~/.cache/pre-commit key: ${{ runner.os }}-${{ env.PYTHON_VERSION }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + - name: Setup cached cargo + id: cached-cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Set poetry cache-dir run: echo "POETRY_CACHE_DIR=$(poetry config cache-dir)" >> $GITHUB_ENV From d99d120569b596680382807d470943058bf7dbd7 Mon Sep 17 00:00:00 2001 From: faysou Date: Wed, 7 Aug 2024 09:20:04 +0100 Subject: [PATCH 34/60] Add conversion function of streamed data to backtest data (#1834) --- .../persistence/catalog/parquet.py | 18 ++++++ .../unit_tests/persistence/test_streaming.py | 56 +++++++++++++++++++ 2 files changed, 74 insertions(+) diff --git a/nautilus_trader/persistence/catalog/parquet.py b/nautilus_trader/persistence/catalog/parquet.py index ddf3ad747756..c8d20288386f 100644 --- a/nautilus_trader/persistence/catalog/parquet.py +++ 
b/nautilus_trader/persistence/catalog/parquet.py @@ -44,6 +44,7 @@ from nautilus_trader.core.message import Event from nautilus_trader.core.nautilus_pyo3 import DataBackendSession from nautilus_trader.core.nautilus_pyo3 import NautilusDataType +from nautilus_trader.core.uuid import UUID4 from nautilus_trader.model import NautilusRustDataType from nautilus_trader.model.data import Bar from nautilus_trader.model.data import CustomData @@ -749,3 +750,20 @@ def _read_feather_file( return reader.read_all() except (pa.ArrowInvalid, OSError): return None + + def convert_stream_to_data( + self, + instance_id: UUID4, + data_cls: type, + other_catalog: ParquetDataCatalog | None = None, + ) -> None: + table_name = class_to_filename(data_cls) + feather_file = Path(self.path) / "backtest" / instance_id / f"{table_name}.feather" + + feather_table = self._read_feather_file(feather_file) + custom_data_list = self._handle_table_nautilus(feather_table, data_cls) + + if other_catalog is not None: + other_catalog.write_data(custom_data_list) + else: + self.write_data(custom_data_list) diff --git a/tests/unit_tests/persistence/test_streaming.py b/tests/unit_tests/persistence/test_streaming.py index 794cc090aaac..83b2c79937eb 100644 --- a/tests/unit_tests/persistence/test_streaming.py +++ b/tests/unit_tests/persistence/test_streaming.py @@ -191,6 +191,62 @@ def test_feather_writer_include_types( assert result["NewsEventData"] == 86985 # type: ignore assert len(result) == 1 + def test_feather_writer_stream_to_data( + self, + catalog_betfair: ParquetDataCatalog, + ) -> None: + # Arrange + self.catalog = catalog_betfair + TestPersistenceStubs.setup_news_event_persistence() + + # Load news events into catalog + news_events = TestPersistenceStubs.news_events() + self.catalog.write_data(news_events) + + data_config = BacktestDataConfig( + catalog_path=self.catalog.path, + catalog_fs_protocol="file", + data_cls=NewsEventData.fully_qualified_name(), + client_id="NewsClient", + ) + + # Add some 
arbitrary instrument data to appease BacktestEngine + instrument_data_config = BacktestDataConfig( + catalog_path=self.catalog.path, + catalog_fs_protocol="file", + data_cls=InstrumentStatus.fully_qualified_name(), + ) + + streaming = BetfairTestStubs.streaming_config( + catalog_path=self.catalog.path, + catalog_fs_protocol="file", + ) + + run_config = BacktestRunConfig( + engine=BacktestEngineConfig(streaming=streaming), + data=[data_config, instrument_data_config], + venues=[BetfairTestStubs.betfair_venue_config(book_type="L1_MBP")], + ) + + node = BacktestNode(configs=[run_config]) + r = node.run() + + # Act + # NewsEventData is overridden here with data from the stream, but it should be the same data + self.catalog.convert_stream_to_data(r[0].instance_id, NewsEventData) + + node2 = BacktestNode(configs=[run_config]) + r2 = node2.run() + + # Assert + result = self.catalog.read_backtest( + instance_id=r2[0].instance_id, + raise_on_failed_deserialize=True, + ) + + result = Counter([r.__class__.__name__ for r in result]) # type: ignore + assert result["NewsEventData"] == 86985 # type: ignore + def test_feather_writer_signal_data( self, catalog_betfair: ParquetDataCatalog, From 970ef603f72051fb94ade6b2b9ee50b2bb326c4b Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 18:18:46 +1000 Subject: [PATCH 35/60] Refine GitHub workflows caching setups --- .github/workflows/build-wheels.yml | 13 ++++++++++++ .github/workflows/build.yml | 34 ++++++++++++++++++++++++++---- .github/workflows/coverage.yml | 15 ++++++++++++- 3 files changed, 57 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index f9b7dbbe9ed7..4d314b9680fc 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -63,6 +63,19 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Set up cached cargo + id: cached-cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + 
~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Get Poetry version from poetry-version run: | version=$(cat poetry-version) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d507dedca068..02c23c7fcdeb 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -104,14 +104,14 @@ jobs: # make install-talib # poetry run pip install setuptools numpy==1.26.4 ta-lib - - name: Setup cached pre-commit + - name: Set up cached pre-commit id: cached-pre-commit uses: actions/cache@v4 with: path: ~/.cache/pre-commit key: ${{ runner.os }}-${{ env.PYTHON_VERSION }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} - - name: Setup cached cargo + - name: Set up cached cargo id: cached-cargo uses: actions/cache@v4 with: @@ -218,13 +218,26 @@ jobs: - name: Install build dependencies run: python -m pip install --upgrade pip setuptools wheel pre-commit msgspec - - name: Setup cached pre-commit + - name: Set up cached pre-commit id: cached-pre-commit uses: actions/cache@v4 with: path: ~/.cache/pre-commit key: ${{ runner.os }}-${{ matrix.python-version }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + - name: Set up cached cargo + id: cached-cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Set poetry cache-dir run: echo "POETRY_CACHE_DIR=$(poetry config cache-dir)" >> $GITHUB_ENV @@ -310,13 +323,26 @@ jobs: - name: Install build dependencies run: python -m pip install --upgrade pip setuptools wheel pre-commit msgspec - - name: Setup cached pre-commit + - name: Set up cached pre-commit id: cached-pre-commit uses: actions/cache@v4 with: path: ~/.cache/pre-commit key: ${{ 
runner.os }}-${{ env.PYTHON_VERSION }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + - name: Set up cached cargo + id: cached-cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Set poetry cache-dir run: echo "POETRY_CACHE_DIR=$(poetry config cache-dir)" >> $GITHUB_ENV diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index f2c73c88a571..076c2e1c38d8 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -91,13 +91,26 @@ jobs: # make install-talib # poetry run pip install setuptools numpy==1.26.4 ta-lib - - name: Setup cached pre-commit + - name: Set up cached pre-commit id: cached-pre-commit uses: actions/cache@v4 with: path: ~/.cache/pre-commit key: ${{ runner.os }}-${{ matrix.python-version }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + - name: Set up cached cargo + id: cached-cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Run pre-commit run: pre-commit run --all-files From fa6fb058504ffa9652d331a068a580ca5f036c8b Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Wed, 7 Aug 2024 18:21:35 +1000 Subject: [PATCH 36/60] Update release notes --- RELEASES.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/RELEASES.md b/RELEASES.md index dd23e9219545..620be710051e 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -5,6 +5,7 @@ Released on TBD (UTC). 
### Enhancements - Added `@customdata` decorator to reduce need for boiler plate implementing custom data types (#1828), thanks @faysou - Added timeout for HTTP client in Rust (#1835), thanks @davidsblom +- Added catalog conversion function of streamed data to backtest data (#1834), thanks @faysou - Upgraded Cython to 3.0.11 ### Breaking Changes @@ -12,7 +13,7 @@ None ### Fixes - Fixed creation of `instrumend_id` folder when writing PyO3 bars in catalog (#1832), thanks @faysou -- Fixed handling `include_types` option in `StreamingFeatherWriter` (#1833), thanks @faysou +- Fixed `StreamingFeatherWriter` handling of `include_types` option (#1833), thanks @faysou - Fixed `BybitExecutionClient` position reports error handling and logging --- From 530c62457ca3dd615c7dd283cb9be43cc423a8ac Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Wed, 7 Aug 2024 10:25:23 +0200 Subject: [PATCH 37/60] Add reduce only checks in OrderMatchingEngine (#1836) --- nautilus_core/backtest/src/matching_engine.rs | 88 +++++++++++++++++-- nautilus_core/model/src/orders/any.rs | 45 ++++++++++ nautilus_core/model/src/orders/stubs.rs | 33 +++++++ 3 files changed, 159 insertions(+), 7 deletions(-) diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index 8f91e80684d0..a84e5d14e3a6 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -347,6 +347,29 @@ impl OrderMatchingEngine { return; } + // Check reduce-only instruction + if self.config.use_reduce_only + && order.is_reduce_only() + && !order.is_closed() + && position.map_or(true, |pos| { + pos.is_closed() + || (order.is_buy() && pos.is_long()) + || (order.is_sell() && pos.is_short()) + }) + { + self.generate_order_rejected( + order, + format!( + "Reduce-only order {} ({}-{}) would have increased position", + order.client_order_id(), + order.order_type().to_string().to_uppercase(), + order.order_side().to_string().to_uppercase() + ) + 
.into(), + ); + return; + } + match order.order_type() { OrderType::Market => self.process_market_order(order), OrderType::Limit => self.process_limit_order(order), @@ -828,9 +851,10 @@ mod tests { instrument: InstrumentAny, msgbus: Rc, account_type: Option, + config: Option, ) -> OrderMatchingEngine { let cache = Rc::new(Cache::default()); - let config = OrderMatchingEngineConfig::default(); + let config = config.unwrap_or_default(); OrderMatchingEngine::new( instrument, 1, @@ -873,7 +897,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None); + let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Buy, @@ -923,7 +947,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None); + let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Buy, @@ -959,7 +983,8 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None); + let mut engine = + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None); let order = TestOrderStubs::market_order( instrument_es.id(), OrderSide::Buy, @@ -995,7 +1020,8 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None); + let mut engine = + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None); let limit_order = TestOrderStubs::limit_order( instrument_es.id(), OrderSide::Sell, @@ -1033,7 +1059,8 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None); + let mut 
engine = + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None); let stop_order = TestOrderStubs::stop_market_order( instrument_es.id(), OrderSide::Sell, @@ -1073,7 +1100,7 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None); + let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Sell, @@ -1098,4 +1125,51 @@ mod tests { exec_spawn_id=None, tags=None)") ); } + + #[rstest] + fn test_order_matching_engine_reduce_only_error( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + instrument_es: InstrumentAny, + ) { + // Register saving message handler to exec engine endpoint + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); + + // Create engine (with reduce_only option) and process order + let config = OrderMatchingEngineConfig { + use_reduce_only: true, + bar_execution: false, + reject_stop_orders: false, + support_gtd_orders: false, + support_contingent_orders: false, + use_position_ids: false, + use_random_ids: false, + }; + let mut engine = + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, Some(config)); + let market_order = TestOrderStubs::market_order_reduce( + instrument_es.id(), + OrderSide::Buy, + Quantity::from("1"), + None, + None, + ); + + engine.process_order(&market_order, account_id); + + // Get messages and test + let saved_messages = get_order_event_handler_messages(order_event_handler); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from("Reduce-only order O-19700101-000000-001-001-1 (MARKET-BUY) would have increased 
position") + ); + } } diff --git a/nautilus_core/model/src/orders/any.rs b/nautilus_core/model/src/orders/any.rs index f18c2ff8ea9c..01ae63378e98 100644 --- a/nautilus_core/model/src/orders/any.rs +++ b/nautilus_core/model/src/orders/any.rs @@ -500,6 +500,51 @@ impl OrderAny { Self::TrailingStopMarket(order) => order.would_reduce_only(side, position_qty), } } + + #[must_use] + pub fn is_reduce_only(&self) -> bool { + match self { + Self::Limit(order) => order.is_reduce_only(), + Self::Market(order) => order.is_reduce_only(), + Self::MarketToLimit(order) => order.is_reduce_only(), + Self::LimitIfTouched(order) => order.is_reduce_only(), + Self::MarketIfTouched(order) => order.is_reduce_only(), + Self::StopLimit(order) => order.is_reduce_only(), + Self::StopMarket(order) => order.is_reduce_only(), + Self::TrailingStopLimit(order) => order.is_reduce_only(), + Self::TrailingStopMarket(order) => order.is_reduce_only(), + } + } + + #[must_use] + pub fn is_buy(&self) -> bool { + match self { + Self::Limit(order) => order.is_buy(), + Self::LimitIfTouched(order) => order.is_buy(), + Self::Market(order) => order.is_buy(), + Self::MarketIfTouched(order) => order.is_buy(), + Self::MarketToLimit(order) => order.is_buy(), + Self::StopLimit(order) => order.is_buy(), + Self::StopMarket(order) => order.is_buy(), + Self::TrailingStopLimit(order) => order.is_buy(), + Self::TrailingStopMarket(order) => order.is_buy(), + } + } + + #[must_use] + pub fn is_sell(&self) -> bool { + match self { + Self::Limit(order) => order.is_sell(), + Self::LimitIfTouched(order) => order.is_sell(), + Self::Market(order) => order.is_sell(), + Self::MarketIfTouched(order) => order.is_sell(), + Self::MarketToLimit(order) => order.is_sell(), + Self::StopLimit(order) => order.is_sell(), + Self::StopMarket(order) => order.is_sell(), + Self::TrailingStopLimit(order) => order.is_sell(), + Self::TrailingStopMarket(order) => order.is_sell(), + } + } } impl PartialEq for OrderAny { diff --git 
a/nautilus_core/model/src/orders/stubs.rs b/nautilus_core/model/src/orders/stubs.rs index 42ca2a25f8ba..dfdf4b6469a2 100644 --- a/nautilus_core/model/src/orders/stubs.rs +++ b/nautilus_core/model/src/orders/stubs.rs @@ -160,6 +160,39 @@ impl TestOrderStubs { OrderAny::Market(order) } + #[must_use] + pub fn market_order_reduce( + instrument_id: InstrumentId, + order_side: OrderSide, + quantity: Quantity, + client_order_id: Option, + time_in_force: Option, + ) -> OrderAny { + let order = MarketOrder::new( + TraderId::default(), + StrategyId::default(), + instrument_id, + client_order_id.unwrap_or_default(), + order_side, + quantity, + time_in_force.unwrap_or(TimeInForce::Gtc), + UUID4::new(), + UnixNanos::default(), + true, // reduce only + false, + None, + None, + None, + None, + None, + None, + None, + None, + ) + .unwrap(); + OrderAny::Market(order) + } + #[must_use] pub fn limit_order( instrument_id: InstrumentId, From 4f4db7cc30a3fdecfe6c7ccd3b2cd3415d2cb9aa Mon Sep 17 00:00:00 2001 From: David Blom Date: Wed, 7 Aug 2024 23:02:05 +0200 Subject: [PATCH 38/60] Rename to timeout_secs for consistency (#1837) --- nautilus_core/network/src/python/http.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautilus_core/network/src/python/http.rs b/nautilus_core/network/src/python/http.rs index 0cd1e24dc5d8..b034415617ee 100644 --- a/nautilus_core/network/src/python/http.rs +++ b/nautilus_core/network/src/python/http.rs @@ -112,7 +112,7 @@ impl HttpClient { headers: Option>, body: Option<&'py PyBytes>, keys: Option>, - timeout_sec: Option, + timeout_secs: Option, py: Python<'py>, ) -> PyResult> { let headers = headers.unwrap_or_default(); @@ -130,7 +130,7 @@ impl HttpClient { }) .await; match client - .send_request(method, url, headers, body_vec, timeout_sec) + .send_request(method, url, headers, body_vec, timeout_secs) .await { Ok(res) => Ok(res), From 0a49b4e1b7e95a462ddbf22fa574d2ef9f6bfa1a Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 
Aug 2024 07:08:31 +1000 Subject: [PATCH 39/60] Add timeout_secs to HttpClient.request type stub --- nautilus_trader/core/nautilus_pyo3.pyi | 1 + 1 file changed, 1 insertion(+) diff --git a/nautilus_trader/core/nautilus_pyo3.pyi b/nautilus_trader/core/nautilus_pyo3.pyi index b02643c4cb50..dd9c7b27963e 100644 --- a/nautilus_trader/core/nautilus_pyo3.pyi +++ b/nautilus_trader/core/nautilus_pyo3.pyi @@ -2532,6 +2532,7 @@ class HttpClient: headers: dict[str, str] | None = None, body: bytes | None = None, keys: list[str] | None = None, + timeout_secs: int | None = None, ) -> HttpResponse: ... class HttpMethod(Enum): From 64ddb89fce972650a866f1c824ef160129e330ff Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 17:22:54 +1000 Subject: [PATCH 40/60] Include quantity in position reports --- nautilus_trader/analysis/reporter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautilus_trader/analysis/reporter.py b/nautilus_trader/analysis/reporter.py index e869a5975488..be640031204a 100644 --- a/nautilus_trader/analysis/reporter.py +++ b/nautilus_trader/analysis/reporter.py @@ -138,7 +138,6 @@ def generate_positions_report(positions: list[Position]) -> pd.DataFrame: sort = ["ts_opened", "ts_closed", "position_id"] report = pd.DataFrame(data=positions).set_index("position_id").sort_values(sort) del report["signed_qty"] - del report["quantity"] del report["quote_currency"] del report["base_currency"] del report["settlement_currency"] From 2ff856cac4acf4678ccfc6d127f3fba67e3b590e Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 17:23:09 +1000 Subject: [PATCH 41/60] Comment development logging --- nautilus_trader/adapters/bybit/execution.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautilus_trader/adapters/bybit/execution.py b/nautilus_trader/adapters/bybit/execution.py index cee01c42fb20..5abc12731b52 100644 --- a/nautilus_trader/adapters/bybit/execution.py +++ b/nautilus_trader/adapters/bybit/execution.py @@ -411,7 
+411,7 @@ async def generate_position_status_reports( positions = await self._http_account.query_position_info(product_type) for position in positions: # Uncomment for development - self._log.info(f"Generating report {position}", LogColor.MAGENTA) + # self._log.info(f"Generating report {position}", LogColor.MAGENTA) instr: InstrumentId = BybitSymbol( position.symbol + "-" + product_type.value.upper(), ).parse_as_nautilus() From 3b4ab6e13470597b4cddac27ae7ef51bad76958b Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 17:27:46 +1000 Subject: [PATCH 42/60] Update dependencies --- nautilus_core/Cargo.lock | 12 ++++++------ nautilus_core/Cargo.toml | 2 +- poetry.lock | 19 ++++++++----------- 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 7dc5e92d36dc..9560da57c414 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -733,9 +733,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26a5c3fd7bfa1ce3897a3a3501d362b2d87b7f2583ebcb4a949ec25911025cbc" +checksum = "504bdec147f2cc13c8b57ed9401fd8a147cc66b67ad5cb241394244f2c947549" dependencies = [ "jobserver", "libc", @@ -4161,18 +4161,18 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" [[package]] name = "serde" -version = "1.0.204" +version = "1.0.205" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "e33aedb1a7135da52b7c21791455563facbbcc43d0f0f66165b42c21b3dfb150" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.205" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = 
"692d6f5ac90220161d6774db30c662202721e64aed9058d2c394f451261420c1" dependencies = [ "proc-macro2", "quote", diff --git a/nautilus_core/Cargo.toml b/nautilus_core/Cargo.toml index c36ef95dbd05..c0d6a223866b 100644 --- a/nautilus_core/Cargo.toml +++ b/nautilus_core/Cargo.toml @@ -43,7 +43,7 @@ rmp-serde = "1.3.0" rust_decimal = "1.35.0" rust_decimal_macros = "1.35.0" semver = "1.0.23" -serde = { version = "1.0.203", features = ["derive"] } +serde = { version = "1.0.205", features = ["derive"] } serde_json = "1.0.122" strum = { version = "0.26.3", features = ["derive"] } thiserror = "1.0.63" diff --git a/poetry.lock b/poetry.lock index 60b1a9c5e040..3500dabfbeae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "aiohappyeyeballs" -version = "2.3.4" +version = "2.3.5" description = "Happy Eyeballs for asyncio" optional = false -python-versions = "<4.0,>=3.8" +python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.3.4-py3-none-any.whl", hash = "sha256:40a16ceffcf1fc9e142fd488123b2e218abc4188cf12ac20c67200e1579baa42"}, - {file = "aiohappyeyeballs-2.3.4.tar.gz", hash = "sha256:7e1ae8399c320a8adec76f6c919ed5ceae6edd4c3672f4d9eae2b27e37c80ff6"}, + {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, + {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, ] [[package]] @@ -1405,20 +1405,17 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pandas-stubs" -version = "2.2.2.240603" +version = "2.2.2.240807" description = "Type annotations for pandas" optional = false python-versions = ">=3.9" files = [ - {file = "pandas_stubs-2.2.2.240603-py3-none-any.whl", hash = "sha256:e08ce7f602a4da2bff5a67475ba881c39f2a4d4f7fccc1cba57c6f35a379c6c0"}, - {file = "pandas_stubs-2.2.2.240603.tar.gz", hash = "sha256:2dcc86e8fa6ea41535a4561c1f08b3942ba5267b464eff2e99caeee66f9e4cd1"}, + {file = 
"pandas_stubs-2.2.2.240807-py3-none-any.whl", hash = "sha256:893919ad82be4275f0d07bb47a95d08bae580d3fdea308a7acfcb3f02e76186e"}, + {file = "pandas_stubs-2.2.2.240807.tar.gz", hash = "sha256:64a559725a57a449f46225fbafc422520b7410bff9252b661a225b5559192a93"}, ] [package.dependencies] -numpy = [ - {version = ">=1.23.5", markers = "python_version >= \"3.9\" and python_version < \"3.12\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\" and python_version < \"3.13\""}, -] +numpy = ">=1.23.5" types-pytz = ">=2022.1.1" [[package]] From bcc7cdded9547194e75f2b65da229706dc39b6db Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 17:33:06 +1000 Subject: [PATCH 43/60] Stabilize Bybit integration --- README.md | 2 +- docs/integrations/index.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 42e8e68ee6c7..b4bf4b6a20cb 100644 --- a/README.md +++ b/README.md @@ -127,7 +127,7 @@ The following integrations are currently supported: | [Binance](https://binance.com) | `BINANCE` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](https://nautilustrader.io/docs/latest/integrations/binance.html) | | [Binance US](https://binance.us) | `BINANCE` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](https://nautilustrader.io/docs/latest/integrations/binance.html) | | [Binance Futures](https://www.binance.com/en/futures) | `BINANCE` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](https://nautilustrader.io/docs/latest/integrations/binance.html) | -| [Bybit](https://www.bybit.com) | `BYBIT` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/beta-yellow) | [Guide](https://nautilustrader.io/docs/latest/integrations/bybit.html) | +| [Bybit](https://www.bybit.com) | `BYBIT` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | 
[Guide](https://nautilustrader.io/docs/latest/integrations/bybit.html) | | [Databento](https://databento.com) | `DATABENTO` | Data Provider | ![status](https://img.shields.io/badge/beta-yellow) | [Guide](https://nautilustrader.io/docs/latest/integrations/databento.html) | | [Interactive Brokers](https://www.interactivebrokers.com) | `INTERACTIVE_BROKERS` | Brokerage (multi-venue) | ![status](https://img.shields.io/badge/stable-green) | [Guide](https://nautilustrader.io/docs/latest/integrations/ib.html) | diff --git a/docs/integrations/index.md b/docs/integrations/index.md index 12be06976487..4a128e4ac423 100644 --- a/docs/integrations/index.md +++ b/docs/integrations/index.md @@ -11,7 +11,7 @@ The following integrations are currently supported: | [Binance](https://binance.com) | `BINANCE` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](binance.md) | | [Binance US](https://binance.us) | `BINANCE` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](binance.md) | | [Binance Futures](https://www.binance.com/en/futures) | `BINANCE` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](binance.md) | -| [Bybit](https://www.bybit.com) | `BYBIT` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/beta-yellow) | [Guide](bybit.md) | +| [Bybit](https://www.bybit.com) | `BYBIT` | Crypto Exchange (CEX) | ![status](https://img.shields.io/badge/stable-green) | [Guide](bybit.md) | | [Databento](https://databento.com) | `DATABENTO` | Data Provider | ![status](https://img.shields.io/badge/beta-yellow) | [Guide](databento.md) | | [Interactive Brokers](https://www.interactivebrokers.com) | `INTERACTIVE_BROKERS` | Brokerage (multi-venue) | ![status](https://img.shields.io/badge/stable-green) | [Guide](ib.md) | From cf3bd43e4aeaab8816c7f64f99eedc7041588a08 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 17:39:21 +1000 Subject: [PATCH 44/60] Fix 
copyright alignment --- nautilus_core/common/src/logging/headers.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautilus_core/common/src/logging/headers.rs b/nautilus_core/common/src/logging/headers.rs index ce8184826fb5..4dfff2a3b259 100644 --- a/nautilus_core/common/src/logging/headers.rs +++ b/nautilus_core/common/src/logging/headers.rs @@ -37,7 +37,7 @@ pub fn log_header(trader_id: TraderId, machine_id: &str, instance_id: UUID4, com header_sepr(c, "================================================================="); header_sepr(c, " NAUTILUS TRADER - Automated Algorithmic Trading Platform"); header_sepr(c, " by Nautech Systems Pty Ltd."); - header_sepr(c, "Copyright (C) 2015-2024. All rights reserved."); + header_sepr(c, " Copyright (C) 2015-2024. All rights reserved."); header_sepr(c, "================================================================="); header_line(c, ""); header_line(c, "⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣠⣴⣶⡟⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀"); From 17a9e46c986b22506975f414af3907041b7c4fbe Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 18:22:49 +1000 Subject: [PATCH 45/60] Cleanup Binance order update handling --- .../adapters/binance/futures/schemas/user.py | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/nautilus_trader/adapters/binance/futures/schemas/user.py b/nautilus_trader/adapters/binance/futures/schemas/user.py index a69d95a8c3f4..d6beaf152ba2 100644 --- a/nautilus_trader/adapters/binance/futures/schemas/user.py +++ b/nautilus_trader/adapters/binance/futures/schemas/user.py @@ -279,18 +279,10 @@ def handle_order_trade_update( # noqa: C901 (too complex) ts_event = millis_to_nanos(self.T) venue_order_id = VenueOrderId(str(self.i)) instrument_id = exec_client._get_cached_instrument_id(self.s) - strategy_id = exec_client._cache.strategy_id_for_order(client_order_id) - instrument = exec_client._instrument_provider.find(instrument_id=instrument_id) - if instrument is None: - raise 
ValueError(f"Cannot handle trade: instrument {instrument_id} not found") - - price_precision = instrument.price_precision - size_precision = instrument.size_precision - - order = exec_client._cache.order(client_order_id) - if not order: - exec_client._log.error(f"Cannot find order {client_order_id!r}") + strategy_id = None + if client_order_id: + strategy_id = exec_client._cache.strategy_id_for_order(client_order_id) if strategy_id is None: report = self.parse_to_order_status_report( @@ -303,7 +295,21 @@ def handle_order_trade_update( # noqa: C901 (too complex) enum_parser=exec_client._enum_parser, ) exec_client._send_order_status_report(report) - elif self.x == BinanceExecutionType.NEW: + return + + instrument = exec_client._instrument_provider.find(instrument_id=instrument_id) + if instrument is None: + raise ValueError(f"Cannot handle trade: instrument {instrument_id} not found") + + price_precision = instrument.price_precision + size_precision = instrument.size_precision + + order = exec_client._cache.order(client_order_id) + if not order: + exec_client._log.error(f"Cannot find order {client_order_id!r}") + return + + if self.x == BinanceExecutionType.NEW: if order.order_type == OrderType.TRAILING_STOP_MARKET and order.is_open: return # Already accepted: this is an update From eb95b56f9008eb5bbe863cc4ae6a3684205af41b Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 18:25:46 +1000 Subject: [PATCH 46/60] Fix BybitExecutionClient external order handling --- RELEASES.md | 1 + nautilus_trader/adapters/bybit/execution.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/RELEASES.md b/RELEASES.md index 620be710051e..f8d58d99b170 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -15,6 +15,7 @@ None - Fixed creation of `instrumend_id` folder when writing PyO3 bars in catalog (#1832), thanks @faysou - Fixed `StreamingFeatherWriter` handling of `include_types` option (#1833), thanks @faysou - Fixed `BybitExecutionClient` 
position reports error handling and logging +- Fixed `BybitExecutionClient` order report handling to correctly process external orders --- diff --git a/nautilus_trader/adapters/bybit/execution.py b/nautilus_trader/adapters/bybit/execution.py index 5abc12731b52..3ec49cf59489 100644 --- a/nautilus_trader/adapters/bybit/execution.py +++ b/nautilus_trader/adapters/bybit/execution.py @@ -865,9 +865,9 @@ def _process_execution(self, execution: BybitWsAccountExecution) -> None: client_order_id = self._cache.client_order_id(venue_order_id) if client_order_id is None: - # TODO: We can generate an external order fill here instead - self._log.error( - f"Cannot process order execution for {venue_order_id!r}: no `ClientOrderId` found", + self._log.debug( + f"Cannot process order execution for {venue_order_id!r}: no `ClientOrderId` found " + "(most likely due to being an external order)", ) return @@ -976,7 +976,11 @@ def _handle_account_order_update(self, raw: bytes) -> None: # noqa: C901 (too c enum_parser=self._enum_parser, ts_init=self._clock.timestamp_ns(), ) - strategy_id = self._cache.strategy_id_for_order(report.client_order_id) + + strategy_id = None + if report.client_order_id: + strategy_id = self._cache.strategy_id_for_order(report.client_order_id) + if strategy_id is None: # External order self._send_order_status_report(report) From 1d602e99f279cae9ba8331d692b226ceed39c94c Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 19:07:46 +1000 Subject: [PATCH 47/60] Add BarType.from_str test case --- tests/unit_tests/model/test_bar.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/unit_tests/model/test_bar.py b/tests/unit_tests/model/test_bar.py index 59bda875e1b7..96d902030950 100644 --- a/tests/unit_tests/model/test_bar.py +++ b/tests/unit_tests/model/test_bar.py @@ -344,7 +344,11 @@ def test_bar_type_from_str_with_invalid_values(self, input: str, expected_err: s @pytest.mark.parametrize( "value", - ["", "AUD/USD", 
"AUD/USD.IDEALPRO-1-MILLISECOND-BID"], + [ + "", + "AUD/USD", + "AUD/USD.IDEALPRO-1-MILLISECOND-BID", + ], ) def test_from_str_given_various_invalid_strings_raises_value_error(self, value): # Arrange, Act, Assert @@ -393,6 +397,14 @@ def test_from_str_given_various_invalid_strings_raises_value_error(self, value): AggregationSource.INTERNAL, ), ], + [ + "TOTAL-INDEX.TRADINGVIEW-2-HOUR-LAST-EXTERNAL", + BarType( + InstrumentId(Symbol("TOTAL-INDEX"), Venue("TRADINGVIEW")), + BarSpecification(2, BarAggregation.HOUR, PriceType.LAST), + AggregationSource.EXTERNAL, + ), + ], ], ) def test_from_str_given_various_valid_string_returns_expected_specification( From 599631b7fff1215808fdca5668e682175bb3e33b Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 21:18:54 +1000 Subject: [PATCH 48/60] Fix typos --- nautilus_trader/adapters/_template/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautilus_trader/adapters/_template/data.py b/nautilus_trader/adapters/_template/data.py index 0596d97a7ec5..de2b624c538d 100644 --- a/nautilus_trader/adapters/_template/data.py +++ b/nautilus_trader/adapters/_template/data.py @@ -40,7 +40,7 @@ class TemplateLiveDataClient(LiveDataClient): """ An example of a ``LiveDataClient`` highlighting the overridable abstract methods. - A live data client general handles non-market or custom data feeds and requests. + A live data client generally handles non-market or custom data feeds and requests. +---------------------------------------+-------------+ | Method | Requirement | @@ -103,7 +103,7 @@ class TemplateLiveMarketDataClient(LiveMarketDataClient): An example of a ``LiveMarketDataClient`` highlighting the overridable abstract methods. - A live market data client general handles market data feeds and requests. + A live market data client generally handles market data feeds and requests. 
+----------------------------------------+-------------+ | Method | Requirement | From f8d9e34c1a9273040700c5077548ad6587a83474 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Thu, 8 Aug 2024 22:37:40 +1000 Subject: [PATCH 49/60] Add Windows library for linker --- build.py | 1 + 1 file changed, 1 insertion(+) diff --git a/build.py b/build.py index 97fa6a18731d..8dc4fb405b07 100644 --- a/build.py +++ b/build.py @@ -172,6 +172,7 @@ def _build_extensions() -> list[Extension]: "OleAut32.lib", "Pdh.lib", "PowrProf.lib", + "Propsys.lib", "Psapi.lib", "schannel.lib", "secur32.lib", From dd0318f5206b4d390a8d95c7618fe49367fc55d7 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 07:16:36 +1000 Subject: [PATCH 50/60] Revert cargo cache for some GitHub workflows --- .github/workflows/build-wheels.yml | 13 ------------- .github/workflows/coverage.yml | 13 ------------- 2 files changed, 26 deletions(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 4d314b9680fc..f9b7dbbe9ed7 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -63,19 +63,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Set up cached cargo - id: cached-cargo - uses: actions/cache@v4 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - restore-keys: ${{ runner.os }}-cargo- - - name: Get Poetry version from poetry-version run: | version=$(cat poetry-version) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 076c2e1c38d8..86efe110e4cc 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -98,19 +98,6 @@ jobs: path: ~/.cache/pre-commit key: ${{ runner.os }}-${{ matrix.python-version }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} - - name: Set up cached cargo - id: cached-cargo - uses: 
actions/cache@v4 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - restore-keys: ${{ runner.os }}-cargo- - - name: Run pre-commit run: pre-commit run --all-files From 9baf10e712e4f40a68d29a94ce8154088560dc5d Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 17:16:14 +1000 Subject: [PATCH 51/60] Add wholearchive for linking Windows libraries --- build.py | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/build.py b/build.py index 8dc4fb405b07..adf7b90f3d81 100644 --- a/build.py +++ b/build.py @@ -159,28 +159,7 @@ def _build_extensions() -> list[Extension]: extra_compile_args.append("-pipe") if platform.system() == "Windows": - extra_link_args += [ - "AdvAPI32.Lib", - "bcrypt.lib", - "Crypt32.lib", - "Iphlpapi.lib", - "Kernel32.lib", - "ncrypt.lib", - "Netapi32.lib", - "ntdll.lib", - "Ole32.lib", - "OleAut32.lib", - "Pdh.lib", - "PowrProf.lib", - "Propsys.lib", - "Psapi.lib", - "schannel.lib", - "secur32.lib", - "Shell32.lib", - "User32.Lib", - "UserEnv.Lib", - "WS2_32.Lib", - ] + extra_link_args.append("/WHOLEARCHIVE") print("Creating C extension modules...") print(f"define_macros={define_macros}") From 69d70477e6221c8ca17931d9177516c691a7bfe9 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 17:23:14 +1000 Subject: [PATCH 52/60] Update GitHub workflow --- .github/workflows/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 02c23c7fcdeb..c38224d6ec84 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -152,7 +152,7 @@ jobs: - name: Run nautilus_core cargo tests (Linux) run: | - cargo install cargo-nextest + cargo install cargo-nextest --force make cargo-test - name: Run tests (Linux) @@ -360,7 +360,7 @@ jobs: - name: Run nautilus_core cargo tests 
(macOS) run: | - cargo install cargo-nextest + cargo install cargo-nextest --force make cargo-test - name: Run tests (macOS) From 88514bfa277c55bb57934ca44fe1f8610e1811f2 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 17:32:58 +1000 Subject: [PATCH 53/60] Update dependencies --- .pre-commit-config.yaml | 2 +- nautilus_core/Cargo.lock | 12 +-- nautilus_core/cli/Cargo.toml | 2 +- poetry.lock | 200 +++++++++++++++++------------------ pyproject.toml | 2 +- 5 files changed, 109 insertions(+), 109 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2a3b26d9b087..12afb3e256f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -83,7 +83,7 @@ repos: exclude: "docs/_pygments/monokai.py" - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.6 + rev: v0.5.7 hooks: - id: ruff args: ["--fix"] diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock index 9560da57c414..6627a63bef86 100644 --- a/nautilus_core/Cargo.lock +++ b/nautilus_core/Cargo.lock @@ -833,9 +833,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.13" +version = "4.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc" +checksum = "c937d4061031a6d0c8da4b9a4f98a172fc2976dfb1c19213a9cf7d0d3c837e36" dependencies = [ "clap_builder", "clap_derive", @@ -843,9 +843,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.13" +version = "4.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99" +checksum = "85379ba512b21a328adf887e85f7742d12e96eb31f3ef077df4ffc26b506ffed" dependencies = [ "anstream", "anstyle", @@ -1020,7 +1020,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.13", + "clap 4.5.14", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -2619,7 +2619,7 @@ name = "nautilus-cli" version = 
"0.28.0" dependencies = [ "anyhow", - "clap 4.5.13", + "clap 4.5.14", "clap_derive", "dotenvy", "log", diff --git a/nautilus_core/cli/Cargo.toml b/nautilus_core/cli/Cargo.toml index 1c64355a007e..377d7ff6cde2 100644 --- a/nautilus_core/cli/Cargo.toml +++ b/nautilus_core/cli/Cargo.toml @@ -18,7 +18,7 @@ nautilus-infrastructure = { path = "../infrastructure" , features = ["postgres"] anyhow = { workspace = true } log = { workspace = true } tokio = {workspace = true} -clap = { version = "4.5.13", features = ["derive", "env"] } +clap = { version = "4.5.14", features = ["derive", "env"] } clap_derive = { version = "4.5.13" } dotenvy = { version = "0.15.7" } simple_logger = "5.0.0" diff --git a/poetry.lock b/poetry.lock index 3500dabfbeae..ab82f092bc78 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.1" +version = "3.10.2" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47b4c2412960e64d97258f40616efddaebcb34ff664c8a972119ed38fac2a62c"}, - {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7dbf637f87dd315fa1f36aaed8afa929ee2c607454fb7791e74c88a0d94da59"}, - {file = "aiohttp-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8fb76214b5b739ce59e2236a6489d9dc3483649cfd6f563dbf5d8e40dbdd57d"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c577cdcf8f92862363b3d598d971c6a84ed8f0bf824d4cc1ce70c2fb02acb4a"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:777e23609899cb230ad2642b4bdf1008890f84968be78de29099a8a86f10b261"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b07286a1090483799599a2f72f76ac396993da31f6e08efedb59f40876c144fa"}, - {file = 
"aiohttp-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9db600a86414a9a653e3c1c7f6a2f6a1894ab8f83d11505247bd1b90ad57157"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c3f1eb280008e51965a8d160a108c333136f4a39d46f516c64d2aa2e6a53f2"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f5dd109a925fee4c9ac3f6a094900461a2712df41745f5d04782ebcbe6479ccb"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8c81ff4afffef9b1186639506d70ea90888218f5ddfff03870e74ec80bb59970"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2a384dfbe8bfebd203b778a30a712886d147c61943675f4719b56725a8bbe803"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b9fb6508893dc31cfcbb8191ef35abd79751db1d6871b3e2caee83959b4d91eb"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:88596384c3bec644a96ae46287bb646d6a23fa6014afe3799156aef42669c6bd"}, - {file = "aiohttp-3.10.1-cp310-cp310-win32.whl", hash = "sha256:68164d43c580c2e8bf8e0eb4960142919d304052ccab92be10250a3a33b53268"}, - {file = "aiohttp-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d6bbe2c90c10382ca96df33b56e2060404a4f0f88673e1e84b44c8952517e5f3"}, - {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6979b4f20d3e557a867da9d9227de4c156fcdcb348a5848e3e6190fd7feb972"}, - {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03c0c380c83f8a8d4416224aafb88d378376d6f4cadebb56b060688251055cd4"}, - {file = "aiohttp-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c2b104e81b3c3deba7e6f5bc1a9a0e9161c380530479970766a6655b8b77c7c"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b023b68c61ab0cd48bd38416b421464a62c381e32b9dc7b4bdfa2905807452a4"}, - {file = 
"aiohttp-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a07c76a82390506ca0eabf57c0540cf5a60c993c442928fe4928472c4c6e5e6"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41d8dab8c64ded1edf117d2a64f353efa096c52b853ef461aebd49abae979f16"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:615348fab1a9ef7d0960a905e83ad39051ae9cb0d2837da739b5d3a7671e497a"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:256ee6044214ee9d66d531bb374f065ee94e60667d6bbeaa25ca111fc3997158"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d5bb926805022508b7ddeaad957f1fce7a8d77532068d7bdb431056dc630cd"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:028faf71b338f069077af6315ad54281612705d68889f5d914318cbc2aab0d50"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5c12310d153b27aa630750be44e79313acc4e864c421eb7d2bc6fa3429c41bf8"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:de1a91d5faded9054957ed0a9e01b9d632109341942fc123947ced358c5d9009"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c186b270979fb1dee3ababe2d12fb243ed7da08b30abc83ebac3a928a4ddb15"}, - {file = "aiohttp-3.10.1-cp311-cp311-win32.whl", hash = "sha256:4a9ce70f5e00380377aac0e568abd075266ff992be2e271765f7b35d228a990c"}, - {file = "aiohttp-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:a77c79bac8d908d839d32c212aef2354d2246eb9deb3e2cb01ffa83fb7a6ea5d"}, - {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2212296cdb63b092e295c3e4b4b442e7b7eb41e8a30d0f53c16d5962efed395d"}, - {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:4dcb127ca3eb0a61205818a606393cbb60d93b7afb9accd2fd1e9081cc533144"}, - {file = "aiohttp-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb8b79a65332e1a426ccb6290ce0409e1dc16b4daac1cc5761e059127fa3d134"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc24f707ed9cb961f6ee04020ca01de2c89b2811f3cf3361dc7c96a14bfbcc"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cb54f5725b4b37af12edf6c9e834df59258c82c15a244daa521a065fbb11717"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d03e948e53b3639ce4d438f3d1d8202898ec6655cadcc09ec99229d4adc2a9"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786299d719eb5d868f161aeec56d589396b053925b7e0ce36e983d30d0a3e55c"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abda4009a30d51d3f06f36bc7411a62b3e647fa6cc935ef667e3e3d3a7dd09b1"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67f7639424c313125213954e93a6229d3a1d386855d70c292a12628f600c7150"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e5a26d7aac4c0d8414a347da162696eea0629fdce939ada6aedf951abb1d745"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:120548d89f14b76a041088b582454d89389370632ee12bf39d919cc5c561d1ca"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f5293726943bdcea24715b121d8c4ae12581441d22623b0e6ab12d07ce85f9c4"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f8605e573ed6c44ec689d94544b2c4bb1390aaa723a8b5a2cc0a5a485987a68"}, - {file = "aiohttp-3.10.1-cp312-cp312-win32.whl", hash = "sha256:e7168782621be4448d90169a60c8b37e9b0926b3b79b6097bc180c0a8a119e73"}, - {file = 
"aiohttp-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fbf8c0ded367c5c8eaf585f85ca8dd85ff4d5b73fb8fe1e6ac9e1b5e62e11f7"}, - {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:54b7f4a20d7cc6bfa4438abbde069d417bb7a119f870975f78a2b99890226d55"}, - {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fa643ca990323db68911b92f3f7a0ca9ae300ae340d0235de87c523601e58d9"}, - {file = "aiohttp-3.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8311d0d690487359fe2247ec5d2cac9946e70d50dced8c01ce9e72341c21151"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222821c60b8f6a64c5908cb43d69c0ee978a1188f6a8433d4757d39231b42cdb"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7b55d9ede66af7feb6de87ff277e0ccf6d51c7db74cc39337fe3a0e31b5872d"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a95151a5567b3b00368e99e9c5334a919514f60888a6b6d2054fea5e66e527e"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9e9171d2fe6bfd9d3838a6fe63b1e91b55e0bf726c16edf265536e4eafed19"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a57e73f9523e980f6101dc9a83adcd7ac0006ea8bf7937ca3870391c7bb4f8ff"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0df51a3d70a2bfbb9c921619f68d6d02591f24f10e9c76de6f3388c89ed01de6"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b0de63ff0307eac3961b4af74382d30220d4813f36b7aaaf57f063a1243b4214"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8db9b749f589b5af8e4993623dbda6716b2b7a5fcb0fa2277bf3ce4b278c7059"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = 
"sha256:6b14c19172eb53b63931d3e62a9749d6519f7c121149493e6eefca055fcdb352"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cd57ad998e3038aa87c38fe85c99ed728001bf5dde8eca121cadee06ee3f637"}, - {file = "aiohttp-3.10.1-cp38-cp38-win32.whl", hash = "sha256:df31641e3f02b77eb3c5fb63c0508bee0fc067cf153da0e002ebbb0db0b6d91a"}, - {file = "aiohttp-3.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:93094eba50bc2ad4c40ff4997ead1fdcd41536116f2e7d6cfec9596a8ecb3615"}, - {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:440954ddc6b77257e67170d57b1026aa9545275c33312357472504eef7b4cc0b"}, - {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9f8beed277488a52ee2b459b23c4135e54d6a819eaba2e120e57311015b58e9"}, - {file = "aiohttp-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8a8221a63602008550022aa3a4152ca357e1dde7ab3dd1da7e1925050b56863"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a702bd3663b5cbf3916e84bf332400d24cdb18399f0877ca6b313ce6c08bfb43"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1988b370536eb14f0ce7f3a4a5b422ab64c4e255b3f5d7752c5f583dc8c967fc"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ccf1f0a304352c891d124ac1a9dea59b14b2abed1704aaa7689fc90ef9c5be1"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3ea6ef2a83edad84bbdb5d96e22f587b67c68922cd7b6f9d8f24865e655bcf"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b47c125ab07f0831803b88aeb12b04c564d5f07a1c1a225d4eb4d2f26e8b5e"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21778552ef3d44aac3278cc6f6d13a6423504fa5f09f2df34bfe489ed9ded7f5"}, - {file = 
"aiohttp-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bde0693073fd5e542e46ea100aa6c1a5d36282dbdbad85b1c3365d5421490a92"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bf66149bb348d8e713f3a8e0b4f5b952094c2948c408e1cfef03b49e86745d60"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:587237571a85716d6f71f60d103416c9df7d5acb55d96d3d3ced65f39bff9c0c"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bfe33cba6e127d0b5b417623c9aa621f0a69f304742acdca929a9fdab4593693"}, - {file = "aiohttp-3.10.1-cp39-cp39-win32.whl", hash = "sha256:9fbff00646cf8211b330690eb2fd64b23e1ce5b63a342436c1d1d6951d53d8dd"}, - {file = "aiohttp-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:5951c328f9ac42d7bce7a6ded535879bc9ae13032818d036749631fa27777905"}, - {file = "aiohttp-3.10.1.tar.gz", hash = "sha256:8b0d058e4e425d3b45e8ec70d49b402f4d6b21041e674798b1f91ba027c73f28"}, + {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d"}, + {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a"}, + {file = "aiohttp-3.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42"}, + {file = "aiohttp-3.10.2-cp310-cp310-win32.whl", hash = "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839"}, + {file = "aiohttp-3.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67"}, + {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91"}, + {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2"}, + {file = "aiohttp-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f"}, + {file = "aiohttp-3.10.2-cp311-cp311-win32.whl", hash = "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102"}, + {file = "aiohttp-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093"}, + {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478"}, + {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e"}, + {file = "aiohttp-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e"}, + {file = "aiohttp-3.10.2-cp312-cp312-win32.whl", hash = "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f"}, + {file = "aiohttp-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b"}, + {file = 
"aiohttp-3.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f"}, + {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d"}, + {file = "aiohttp-3.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b"}, + {file = "aiohttp-3.10.2-cp38-cp38-win32.whl", hash = "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc"}, + {file = "aiohttp-3.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f"}, + {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316"}, + {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae"}, + {file = "aiohttp-3.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920"}, + {file = 
"aiohttp-3.10.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb"}, + {file = "aiohttp-3.10.2-cp39-cp39-win32.whl", hash = "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04"}, + {file = "aiohttp-3.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b"}, + {file = "aiohttp-3.10.2.tar.gz", hash = "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014"}, ] [package.dependencies] @@ -165,13 +165,13 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -1838,29 +1838,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.6" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, - {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, - {file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, - {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, - {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, - {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, - {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] @@ -2398,4 +2398,4 @@ ib = ["async-timeout", "defusedxml", "nautilus_ibapi"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "87819ed24c0a0158744979441eacb1e39e8847d4072c4f7aa71e3a2685e6db46" +content-hash = "5e63477164b813049a19c7e3a729259e0bea658bb35122528ab55f3c02fff9ac" diff --git a/pyproject.toml b/pyproject.toml index 5126fec760d1..b7dd9b23d928 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,7 +84,7 @@ docformatter = "^1.7.5" mypy = "^1.11.1" pandas-stubs = "^2.2.2" pre-commit = "^3.8.0" -ruff = "^0.5.6" +ruff = "^0.5.7" types-pytz = "^2024.1" 
types-requests = "^2.32" types-toml = "^0.10.2" From 0332626a051a9a99667cfd6f79574b637d592356 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 17:36:39 +1000 Subject: [PATCH 54/60] Conditionally install cargo-nextest --- .github/workflows/build.yml | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c38224d6ec84..2dec6fab6af8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -152,7 +152,13 @@ jobs: - name: Run nautilus_core cargo tests (Linux) run: | - cargo install cargo-nextest --force + if ! command -v cargo-nextest &> /dev/null + then + echo "cargo-nextest not found, installing..." + cargo install cargo-nextest + else + echo "cargo-nextest is already installed" + fi make cargo-test - name: Run tests (Linux) @@ -360,7 +366,13 @@ jobs: - name: Run nautilus_core cargo tests (macOS) run: | - cargo install cargo-nextest --force + if ! command -v cargo-nextest &> /dev/null + then + echo "cargo-nextest not found, installing..." 
+ cargo install cargo-nextest + else + echo "cargo-nextest is already installed" + fi make cargo-test - name: Run tests (macOS) From d2a2c669b1ea3df65b048996edba1a8015d1fca4 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 17:38:52 +1000 Subject: [PATCH 55/60] Upgrade Rust --- README.md | 6 +++--- nautilus_core/Cargo.toml | 2 +- nautilus_core/rust-toolchain.toml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index b4bf4b6a20cb..8cf696b8daa3 100644 --- a/README.md +++ b/README.md @@ -14,9 +14,9 @@ | Platform | Rust | Python | | :----------------- | :------ | :----- | -| `Linux (x86_64)` | 1.80.0+ | 3.10+ | -| `macOS (arm64)` | 1.80.0+ | 3.10+ | -| `Windows (x86_64)` | 1.80.0+ | 3.10+ | +| `Linux (x86_64)` | 1.80.1+ | 3.10+ | +| `macOS (arm64)` | 1.80.1+ | 3.10+ | +| `Windows (x86_64)` | 1.80.1+ | 3.10+ | [![](https://dcbadge.limes.pink/api/server/AUWVs3XaCS)](https://discord.gg/AUWVs3XaCS) diff --git a/nautilus_core/Cargo.toml b/nautilus_core/Cargo.toml index c0d6a223866b..aad7837fbb9a 100644 --- a/nautilus_core/Cargo.toml +++ b/nautilus_core/Cargo.toml @@ -18,7 +18,7 @@ members = [ ] [workspace.package] -rust-version = "1.80.0" +rust-version = "1.80.1" version = "0.28.0" edition = "2021" authors = ["Nautech Systems "] diff --git a/nautilus_core/rust-toolchain.toml b/nautilus_core/rust-toolchain.toml index 0dd8c7b98774..3628336f941d 100644 --- a/nautilus_core/rust-toolchain.toml +++ b/nautilus_core/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -version = "1.80.0" +version = "1.80.1" channel = "stable" From ba770b2a86b98ad3b203d1f0aa75182510ea24e1 Mon Sep 17 00:00:00 2001 From: Filip Macek Date: Fri, 9 Aug 2024 09:45:50 +0200 Subject: [PATCH 56/60] Add contingent order check for order processing in OrderMatchingEngine (#1839) --- nautilus_core/backtest/src/matching_engine.rs | 434 ++++++++++++------ nautilus_core/common/src/cache/mod.rs | 2 +- nautilus_core/model/src/orders/any.rs | 34 +- 3 files 
changed, 337 insertions(+), 133 deletions(-) diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index a84e5d14e3a6..c9c0760227c1 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -19,7 +19,7 @@ #![allow(dead_code)] #![allow(unused_variables)] -use std::{any::Any, collections::HashMap, rc::Rc}; +use std::{any::Any, cell::RefCell, collections::HashMap, rc::Rc}; use nautilus_common::{cache::Cache, msgbus::MessageBus}; use nautilus_core::{nanos::UnixNanos, time::AtomicTime, uuid::UUID4}; @@ -29,7 +29,10 @@ use nautilus_model::{ bar::{Bar, BarType}, delta::OrderBookDelta, }, - enums::{AccountType, BookType, LiquiditySide, MarketStatus, OmsType, OrderSide, OrderType}, + enums::{ + AccountType, BookType, ContingencyType, LiquiditySide, MarketStatus, OmsType, OrderSide, + OrderStatus, OrderType, + }, events::order::{ OrderAccepted, OrderCancelRejected, OrderCanceled, OrderEventAny, OrderExpired, OrderFilled, OrderModifyRejected, OrderRejected, OrderTriggered, OrderUpdated, @@ -96,7 +99,7 @@ pub struct OrderMatchingEngine { pub config: OrderMatchingEngineConfig, clock: &'static AtomicTime, msgbus: Rc, - cache: Rc, + cache: Rc>, book: OrderBook, core: OrderMatchingCore, target_bid: Option, @@ -124,7 +127,7 @@ impl OrderMatchingEngine { account_type: AccountType, clock: &'static AtomicTime, msgbus: Rc, - cache: Rc, + cache: Rc>, config: OrderMatchingEngineConfig, ) -> Self { let book = OrderBook::new(book_type, instrument.id()); @@ -221,155 +224,192 @@ impl OrderMatchingEngine { // -- TRADING COMMANDS ---------------------------------------------------- #[allow(clippy::needless_return)] pub fn process_order(&mut self, order: &OrderAny, account_id: AccountId) { - if self.core.order_exists(order.client_order_id()) { - self.generate_order_rejected(order, "Order already exists".into()); - return; - } + // enter the scope where you will borrow a cache + { + let 
cache_borrow = self.cache.as_ref().borrow(); + + if self.core.order_exists(order.client_order_id()) { + self.generate_order_rejected(order, "Order already exists".into()); + return; + } + + // Index identifiers + self.account_ids.insert(order.trader_id(), account_id); + + // Check for instrument expiration or activation + if EXPIRING_INSTRUMENT_TYPES.contains(&self.instrument.instrument_class()) { + if let Some(activation_ns) = self.instrument.activation_ns() { + if self.clock.get_time_ns() < activation_ns { + self.generate_order_rejected( + order, + format!( + "Contract {} is not yet active, activation {}", + self.instrument.id(), + self.instrument.activation_ns().unwrap() + ) + .into(), + ); + return; + } + } + if let Some(expiration_ns) = self.instrument.expiration_ns() { + if self.clock.get_time_ns() >= expiration_ns { + self.generate_order_rejected( + order, + format!( + "Contract {} has expired, expiration {}", + self.instrument.id(), + self.instrument.expiration_ns().unwrap() + ) + .into(), + ); + return; + } + } + } + + // Contingent orders checks + if self.config.support_contingent_orders { + if let Some(parent_order_id) = order.parent_order_id() { + println!("Search for parent order {}", parent_order_id); + let parent_order = cache_borrow.order(&parent_order_id); + if parent_order.is_none() + || parent_order.unwrap().contingency_type().unwrap() != ContingencyType::Oto + { + panic!("OTO parent not found"); + } + if let Some(parent_order) = parent_order { + let parent_order_status = parent_order.status(); + let order_is_open = order.is_open(); + if parent_order.status() == OrderStatus::Rejected && order.is_open() { + self.generate_order_rejected( + order, + format!("Rejected OTO order from {}", parent_order_id).into(), + ); + return; + } else if parent_order.status() == OrderStatus::Accepted + && parent_order.status() == OrderStatus::Triggered + { + log::info!( + "Pending OTO order {} triggers from {}", + order.client_order_id(), + parent_order_id + ); + 
return; + } + } + } + } - // Index identifiers - self.account_ids.insert(order.trader_id(), account_id); + // Check fo valid order quantity precision + if order.quantity().precision != self.instrument.size_precision() { + self.generate_order_rejected( + order, + format!( + "Invalid order quantity precision for order {}, was {} when {} size precision is {}", + order.client_order_id(), + order.quantity().precision, + self.instrument.id(), + self.instrument.size_precision() + ) + .into(), + ); + return; + } - // Check for instrument expiration or activation - if EXPIRING_INSTRUMENT_TYPES.contains(&self.instrument.instrument_class()) { - if let Some(activation_ns) = self.instrument.activation_ns() { - if self.clock.get_time_ns() < activation_ns { + // Check for valid order price precision + if let Some(price) = order.price() { + if price.precision != self.instrument.price_precision() { self.generate_order_rejected( order, format!( - "Contract {} is not yet active, activation {}", + "Invalid order price precision for order {}, was {} when {} price precision is {}", + order.client_order_id(), + price.precision, self.instrument.id(), - self.instrument.activation_ns().unwrap() + self.instrument.price_precision() ) - .into(), + .into(), ); - return; } + return; } - if let Some(expiration_ns) = self.instrument.expiration_ns() { - if self.clock.get_time_ns() >= expiration_ns { + + // Check for valid order trigger price precision + if let Some(trigger_price) = order.trigger_price() { + if trigger_price.precision != self.instrument.price_precision() { self.generate_order_rejected( order, format!( - "Contract {} has expired, expiration {}", + "Invalid order trigger price precision for order {}, was {} when {} price precision is {}", + order.client_order_id(), + trigger_price.precision, self.instrument.id(), - self.instrument.expiration_ns().unwrap() + self.instrument.price_precision() ) - .into(), + .into(), ); return; } } - } - - // Check fo valid order quantity precision - if 
order.quantity().precision != self.instrument.size_precision() { - self.generate_order_rejected( - order, - format!( - "Invalid order quantity precision for order {}, was {} when {} size precision is {}", - order.client_order_id(), - order.quantity().precision, - self.instrument.id(), - self.instrument.size_precision() - ) - .into(), - ); - return; - } - // Check for valid order price precision - if let Some(price) = order.price() { - if price.precision != self.instrument.price_precision() { + // Get position if exists + let position: Option<&Position> = cache_borrow + .position_for_order(&order.client_order_id()) + .or_else(|| { + if self.oms_type == OmsType::Netting { + let position_id = PositionId::new( + format!("{}-{}", order.instrument_id(), order.strategy_id()).as_str(), + ) + .unwrap(); + cache_borrow.position(&position_id) + } else { + None + } + }); + + // Check not shorting an equity without a MARGIN account + if order.order_side() == OrderSide::Sell + && self.account_type != AccountType::Margin + && matches!(self.instrument, InstrumentAny::Equity(_)) + && (position.is_none() + || !order.would_reduce_only(position.unwrap().side, position.unwrap().quantity)) + { + let position_string = position.map_or("None".to_string(), |pos| pos.id.to_string()); self.generate_order_rejected( order, format!( - "Invalid order price precision for order {}, was {} when {} price precision is {}", - order.client_order_id(), - price.precision, - self.instrument.id(), - self.instrument.price_precision() + "Short selling not permitted on a CASH account with position {position_string} and order {order}", ) .into(), ); + return; } - return; - } - // Check for valid order trigger price precision - if let Some(trigger_price) = order.trigger_price() { - if trigger_price.precision != self.instrument.price_precision() { + // Check reduce-only instruction + if self.config.use_reduce_only + && order.is_reduce_only() + && !order.is_closed() + && position.map_or(true, |pos| { + 
pos.is_closed() + || (order.is_buy() && pos.is_long()) + || (order.is_sell() && pos.is_short()) + }) + { self.generate_order_rejected( order, format!( - "Invalid order trigger price precision for order {}, was {} when {} price precision is {}", + "Reduce-only order {} ({}-{}) would have increased position", order.client_order_id(), - trigger_price.precision, - self.instrument.id(), - self.instrument.price_precision() + order.order_type().to_string().to_uppercase(), + order.order_side().to_string().to_uppercase() ) - .into(), + .into(), ); return; } } - // Get position if exists - let position: Option<&Position> = self - .cache - .position_for_order(&order.client_order_id()) - .or_else(|| { - if self.oms_type == OmsType::Netting { - let position_id = PositionId::new( - format!("{}-{}", order.instrument_id(), order.strategy_id()).as_str(), - ) - .unwrap(); - self.cache.position(&position_id) - } else { - None - } - }); - - // Check not shorting an equity without a MARGIN account - if order.order_side() == OrderSide::Sell - && self.account_type != AccountType::Margin - && matches!(self.instrument, InstrumentAny::Equity(_)) - && (position.is_none() - || !order.would_reduce_only(position.unwrap().side, position.unwrap().quantity)) - { - let position_string = position.map_or("None".to_string(), |pos| pos.id.to_string()); - self.generate_order_rejected( - order, - format!( - "Short selling not permitted on a CASH account with position {position_string} and order {order}", - ) - .into(), - ); - return; - } - - // Check reduce-only instruction - if self.config.use_reduce_only - && order.is_reduce_only() - && !order.is_closed() - && position.map_or(true, |pos| { - pos.is_closed() - || (order.is_buy() && pos.is_long()) - || (order.is_sell() && pos.is_short()) - }) - { - self.generate_order_rejected( - order, - format!( - "Reduce-only order {} ({}-{}) would have increased position", - order.client_order_id(), - order.order_type().to_string().to_uppercase(), - 
order.order_side().to_string().to_uppercase() - ) - .into(), - ); - return; - } - match order.order_type() { OrderType::Market => self.process_market_order(order), OrderType::Limit => self.process_limit_order(order), @@ -774,7 +814,7 @@ impl OrderMatchingEngine { //////////////////////////////////////////////////////////////////////////////// #[cfg(test)] mod tests { - use std::{rc::Rc, sync::LazyLock}; + use std::{cell::RefCell, rc::Rc, sync::LazyLock}; use chrono::{TimeZone, Utc}; use nautilus_common::{ @@ -785,17 +825,21 @@ mod tests { MessageBus, }, }; - use nautilus_core::{nanos::UnixNanos, time::AtomicTime}; + use nautilus_core::{nanos::UnixNanos, time::AtomicTime, uuid::UUID4}; use nautilus_model::{ - enums::{AccountType, BookType, OmsType, OrderSide}, - events::order::{OrderEventAny, OrderEventType}, - identifiers::AccountId, + enums::{ + AccountType, BookType, ContingencyType, OmsType, OrderSide, TimeInForce, TriggerType, + }, + events::order::{OrderEventAny, OrderEventType, OrderRejected}, + identifiers::{AccountId, ClientOrderId, StrategyId, TraderId}, instruments::{ any::InstrumentAny, equity::Equity, stubs::{futures_contract_es, *}, }, - orders::stubs::TestOrderStubs, + orders::{ + any::OrderAny, market::MarketOrder, stop_market::StopMarketOrder, stubs::TestOrderStubs, + }, types::{price::Price, quantity::Quantity}, }; use rstest::{fixture, rstest}; @@ -850,10 +894,11 @@ mod tests { fn get_order_matching_engine( instrument: InstrumentAny, msgbus: Rc, + cache: Option>>, account_type: Option, config: Option, ) -> OrderMatchingEngine { - let cache = Rc::new(Cache::default()); + let cache = cache.unwrap_or(Rc::new(RefCell::new(Cache::default()))); let config = config.unwrap_or_default(); OrderMatchingEngine::new( instrument, @@ -897,7 +942,8 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None); + let mut engine = + get_order_matching_engine(instrument.clone(), 
Rc::new(msgbus), None, None, None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Buy, @@ -947,7 +993,8 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None); + let mut engine = + get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None, None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Buy, @@ -984,7 +1031,7 @@ mod tests { // Create engine and process order let mut engine = - get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None); + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None, None); let order = TestOrderStubs::market_order( instrument_es.id(), OrderSide::Buy, @@ -1021,7 +1068,7 @@ mod tests { // Create engine and process order let mut engine = - get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None); + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None, None); let limit_order = TestOrderStubs::limit_order( instrument_es.id(), OrderSide::Sell, @@ -1060,7 +1107,7 @@ mod tests { // Create engine and process order let mut engine = - get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None); + get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, None, None); let stop_order = TestOrderStubs::stop_market_order( instrument_es.id(), OrderSide::Sell, @@ -1100,7 +1147,8 @@ mod tests { ); // Create engine and process order - let mut engine = get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None); + let mut engine = + get_order_matching_engine(instrument.clone(), Rc::new(msgbus), None, None, None); let order = TestOrderStubs::market_order( instrument.id(), OrderSide::Sell, @@ -1150,8 +1198,13 @@ mod tests { use_position_ids: false, use_random_ids: false, }; - let mut engine = - get_order_matching_engine(instrument_es.clone(), Rc::new(msgbus), None, 
Some(config)); + let mut engine = get_order_matching_engine( + instrument_es.clone(), + Rc::new(msgbus), + None, + None, + Some(config), + ); let market_order = TestOrderStubs::market_order_reduce( instrument_es.id(), OrderSide::Buy, @@ -1172,4 +1225,125 @@ mod tests { Ustr::from("Reduce-only order O-19700101-000000-001-001-1 (MARKET-BUY) would have increased position") ); } + + #[rstest] + fn test_order_matching_engine_contingent_orders_errors( + mut msgbus: MessageBus, + order_event_handler: ShareableMessageHandler, + account_id: AccountId, + time: AtomicTime, + instrument_es: InstrumentAny, + ) { + // Register saving message handler to exec engine endpoint + msgbus.register( + msgbus.switchboard.exec_engine_process.as_str(), + order_event_handler.clone(), + ); + + // Create engine (with reduce_only option) and process order + let config = OrderMatchingEngineConfig { + use_reduce_only: false, + bar_execution: false, + reject_stop_orders: false, + support_gtd_orders: false, + support_contingent_orders: true, + use_position_ids: false, + use_random_ids: false, + }; + let cache = Rc::new(RefCell::new(Cache::default())); + let mut engine = get_order_matching_engine( + instrument_es.clone(), + Rc::new(msgbus), + Some(cache.clone()), + None, + Some(config), + ); + + let entry_client_order_id = ClientOrderId::from("O-19700101-000000-001-001-1"); + let stop_loss_client_order_id = ClientOrderId::from("O-19700101-000000-001-001-2"); + + // Create entry market order + let mut entry_order = OrderAny::Market( + MarketOrder::new( + TraderId::default(), + StrategyId::default(), + instrument_es.id(), + entry_client_order_id, + OrderSide::Buy, + Quantity::from("1"), + TimeInForce::Gtc, + UUID4::new(), + UnixNanos::default(), + false, + false, + Some(ContingencyType::Oto), // <- set contingency type to OTO + None, + None, + None, + None, + None, + None, + None, + ) + .unwrap(), + ); + // Set entry order status to Rejected with proper event + let rejected_event = 
OrderRejected::default(); + entry_order + .apply(OrderEventAny::Rejected(rejected_event)) + .unwrap(); + + // Create stop loss order + let stop_order = OrderAny::StopMarket( + StopMarketOrder::new( + entry_order.trader_id(), + entry_order.strategy_id(), + entry_order.instrument_id(), + stop_loss_client_order_id, + OrderSide::Sell, + entry_order.quantity(), + Price::from("0.95"), + TriggerType::BidAsk, + TimeInForce::Gtc, + None, + true, + false, + None, + None, + None, + Some(ContingencyType::Oto), + None, + None, + Some(entry_client_order_id), // <- parent order id set from entry order + None, + None, + None, + None, + UUID4::new(), + UnixNanos::default(), + ) + .unwrap(), + ); + // Make it Accepted + let accepted_stop_order = TestOrderStubs::make_accepted_order(&stop_order); + + // 1. save entry order in the cache as it will be loaded by the matching engine + // 2. send the stop loss order which has parent of entry order + cache + .as_ref() + .borrow_mut() + .add_order(entry_order.clone(), None, None, false) + .unwrap(); + engine.process_order(&accepted_stop_order, account_id); + + // Get messages and test + let saved_messages = get_order_event_handler_messages(order_event_handler); + assert_eq!(saved_messages.len(), 1); + let first_message = saved_messages.first().unwrap(); + assert_eq!(first_message.event_type(), OrderEventType::Rejected); + assert_eq!( + first_message.message().unwrap(), + Ustr::from(format!("Rejected OTO order from {}", entry_client_order_id).as_str()) + ); + } } diff --git a/nautilus_core/common/src/cache/mod.rs b/nautilus_core/common/src/cache/mod.rs index 20fe478fbb4b..36302b6d0723 100644 --- a/nautilus_core/common/src/cache/mod.rs +++ b/nautilus_core/common/src/cache/mod.rs @@ -1397,7 +1397,7 @@ impl Cache { // } } - self.orders.insert(client_order_id, order); + self.orders.insert(client_order_id, order.clone()); Ok(()) } diff --git a/nautilus_core/model/src/orders/any.rs b/nautilus_core/model/src/orders/any.rs index 
01ae63378e98..f3fe97b8a74c 100644 --- a/nautilus_core/model/src/orders/any.rs +++ b/nautilus_core/model/src/orders/any.rs @@ -32,8 +32,8 @@ use super::{ }; use crate::{ enums::{ - LiquiditySide, OrderSide, OrderSideSpecified, OrderStatus, OrderType, PositionSide, - TriggerType, + ContingencyType, LiquiditySide, OrderSide, OrderSideSpecified, OrderStatus, OrderType, + PositionSide, TriggerType, }, events::order::OrderEventAny, identifiers::{ @@ -545,6 +545,36 @@ impl OrderAny { Self::TrailingStopMarket(order) => order.is_sell(), } } + + #[must_use] + pub fn parent_order_id(&self) -> Option { + match self { + Self::Limit(order) => order.parent_order_id, + Self::LimitIfTouched(order) => order.parent_order_id, + Self::Market(order) => order.parent_order_id, + Self::MarketIfTouched(order) => order.parent_order_id, + Self::MarketToLimit(order) => order.parent_order_id, + Self::StopLimit(order) => order.parent_order_id, + Self::StopMarket(order) => order.parent_order_id, + Self::TrailingStopLimit(order) => order.parent_order_id, + Self::TrailingStopMarket(order) => order.parent_order_id, + } + } + + #[must_use] + pub fn contingency_type(&self) -> Option { + match self { + Self::Limit(order) => order.contingency_type, + Self::LimitIfTouched(order) => order.contingency_type, + Self::Market(order) => order.contingency_type, + Self::MarketIfTouched(order) => order.contingency_type, + Self::MarketToLimit(order) => order.contingency_type, + Self::StopLimit(order) => order.contingency_type, + Self::StopMarket(order) => order.contingency_type, + Self::TrailingStopLimit(order) => order.contingency_type, + Self::TrailingStopMarket(order) => order.contingency_type, + } + } } impl PartialEq for OrderAny { From 442d5893db9d00104bc6acd33cbc76cd0a28c804 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 18:02:19 +1000 Subject: [PATCH 57/60] Update install-cli make target --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile 
index 093c6663f0fc..1143d8894474 100644 --- a/Makefile +++ b/Makefile @@ -167,4 +167,4 @@ install-talib: .PHONY: install-cli install-cli: - (cd nautilus_core && cargo install --path cli --bin nautilus) + (cd nautilus_core && cargo install --path cli --bin nautilus --force) From c095a1dbe9e2db880ef86c361c79a8e7c1d56b2e Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 18:07:16 +1000 Subject: [PATCH 58/60] Fix Windows build --- build.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/build.py b/build.py index adf7b90f3d81..7782ca2192b3 100644 --- a/build.py +++ b/build.py @@ -159,7 +159,29 @@ def _build_extensions() -> list[Extension]: extra_compile_args.append("-pipe") if platform.system() == "Windows": - extra_link_args.append("/WHOLEARCHIVE") + extra_link_args += [ + "AdvAPI32.Lib", + "bcrypt.lib", + "Crypt32.lib", + "Iphlpapi.lib", + "Kernel32.lib", + "ncrypt.lib", + "Netapi32.lib", + "ntdll.lib", + "Ole32.lib", + "OleAut32.lib", + "Pdh.lib", + "PowrProf.lib", + "Propsys.lib", + "Psapi.lib", + "runtimeobject.lib", + "schannel.lib", + "secur32.lib", + "Shell32.lib", + "User32.Lib", + "UserEnv.Lib", + "WS2_32.Lib", + ] print("Creating C extension modules...") print(f"define_macros={define_macros}") From 9136c58231715cf35cf25a525fda80ab981b76ca Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 18:48:23 +1000 Subject: [PATCH 59/60] Fix clippy lints --- nautilus_core/backtest/src/matching_engine.rs | 6 +++--- nautilus_core/common/src/cache/mod.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nautilus_core/backtest/src/matching_engine.rs b/nautilus_core/backtest/src/matching_engine.rs index c9c0760227c1..37e5a1c496b2 100644 --- a/nautilus_core/backtest/src/matching_engine.rs +++ b/nautilus_core/backtest/src/matching_engine.rs @@ -271,7 +271,7 @@ impl OrderMatchingEngine { // Contingent orders checks if self.config.support_contingent_orders { if let Some(parent_order_id) 
= order.parent_order_id() { - println!("Search for parent order {}", parent_order_id); + println!("Search for parent order {parent_order_id}"); let parent_order = cache_borrow.order(&parent_order_id); if parent_order.is_none() || parent_order.unwrap().contingency_type().unwrap() != ContingencyType::Oto @@ -284,7 +284,7 @@ impl OrderMatchingEngine { if parent_order.status() == OrderStatus::Rejected && order.is_open() { self.generate_order_rejected( order, - format!("Rejected OTO order from {}", parent_order_id).into(), + format!("Rejected OTO order from {parent_order_id}").into(), ); return; } else if parent_order.status() == OrderStatus::Accepted @@ -1343,7 +1343,7 @@ mod tests { assert_eq!(first_message.event_type(), OrderEventType::Rejected); assert_eq!( first_message.message().unwrap(), - Ustr::from(format!("Rejected OTO order from {}", entry_client_order_id).as_str()) + Ustr::from(format!("Rejected OTO order from {entry_client_order_id}").as_str()) ); } } diff --git a/nautilus_core/common/src/cache/mod.rs b/nautilus_core/common/src/cache/mod.rs index 36302b6d0723..20fe478fbb4b 100644 --- a/nautilus_core/common/src/cache/mod.rs +++ b/nautilus_core/common/src/cache/mod.rs @@ -1397,7 +1397,7 @@ impl Cache { // } } - self.orders.insert(client_order_id, order.clone()); + self.orders.insert(client_order_id, order); Ok(()) } From 5786f176e341507e5679670537cd1ad43d91c6f7 Mon Sep 17 00:00:00 2001 From: Chris Sellers Date: Fri, 9 Aug 2024 18:57:15 +1000 Subject: [PATCH 60/60] Update release notes --- RELEASES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELEASES.md b/RELEASES.md index f8d58d99b170..263ff8c89c24 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,6 +1,6 @@ # NautilusTrader 1.198.0 Beta -Released on TBD (UTC). +Released on 9th August 2024 (UTC). ### Enhancements - Added `@customdata` decorator to reduce need for boiler plate implementing custom data types (#1828), thanks @faysou