diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index 1198f1e..62317e6 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -20,7 +20,7 @@ jobs:
     runs-on: ubuntu-latest
    strategy:
       matrix:
-        python-version: [3.7, 3.8, 3.9, "3.10"]
+        python-version: [3.8, 3.9, "3.10"]
     steps:
       - uses: actions/checkout@v2
       - name: Set up Python ${{ matrix.python-version }}
diff --git a/CLI.md b/CLI.md
index d727092..74840cf 100644
--- a/CLI.md
+++ b/CLI.md
@@ -36,18 +36,19 @@ Example with default localhost:9999 settings:
 ```
 podping --hive-account <hive-account> --hive-posting-key <posting-key> server
-2021-08-30T00:38:58-0500 | INFO | podping 1.0.0a0 starting up in server mode
-2021-08-30T00:39:00-0500 | INFO | Podping startup sequence initiated, please stand by, full bozo checks in operation...
-2021-08-30T00:39:01-0500 | INFO | Testing Account Resource Credits - before 24.88%
-2021-08-30T00:39:02-0500 | INFO | Transaction sent: 39c2a396784ba6ba498cee3055900442953bb13f - JSON size: 204
-2021-08-30T00:39:02-0500 | INFO | Testing Account Resource Credits.... 5s
-2021-08-30T00:39:17-0500 | INFO | Testing Account Resource Credits - after 24.52%
-2021-08-30T00:39:17-0500 | INFO | Capacity for further podpings : 68.5
-2021-08-30T00:39:19-0500 | INFO | Transaction sent: 39405eaf4a522deb2d965fc9bd8c6b92dca44786 - JSON size: 231
-2021-08-30T00:39:19-0500 | INFO | Startup of Podping status: SUCCESS! Hit the BOOST Button.
-2021-08-30T00:39:19-0500 | INFO | Hive account: @podping.test
-2021-08-30T00:39:19-0500 | INFO | Running ZeroMQ server on 127.0.0.1:9999
-2021-08-30T00:39:19-0500 | INFO | Status - Hive Node: - Uptime: 0:00:20.175997 - IRIs Received: 0 - IRIs Deduped: 0 - IRIs Sent: 0
+2022-01-17T13:16:43+0200 | INFO | podping 1.1.0a1 starting up in server mode
+2022-01-17T13:16:44+0200 | INFO | Podping startup sequence initiated, please stand by, full bozo checks in operation...
+2022-01-17T13:16:45+0200 | INFO | Testing Account Resource Credits - before 99.73%
+2022-01-17T13:16:48+0200 | INFO | Calculating Account Resource Credits for 100 pings: 8.55% | Capacity: 1,169
+2022-01-17T13:16:49+0200 | INFO | Configuration override from Podping Hive: hive_operation_period=30 max_url_list_bytes=8000 diagnostic_report_period=180 control_account='podping' control_account_check_period=180 test_nodes=('https://testnet.openhive.network',)
+2022-01-17T13:16:51+0200 | INFO | Lighthive Node: https://api.hive.blog
+2022-01-17T13:16:51+0200 | INFO | JSON size: 179
+2022-01-17T13:16:51+0200 | INFO | Startup of Podping status: SUCCESS! Hit the BOOST Button.
+2022-01-17T13:16:53+0200 | INFO | Lighthive Fastest: https://api.deathwing.me
+2022-01-17T13:16:53+0200 | INFO | Hive account: @podping.bol
+2022-01-17T13:16:53+0200 | INFO | Running ZeroMQ server on 127.0.0.1:9999
+2022-01-17T13:16:54+0200 | INFO | Lighthive Fastest: https://api.deathwing.me
+2022-01-17T13:16:54+0200 | INFO | Status - Uptime: 0:00:10 | IRIs Received: 0 | IRIs Deduped: 0 | IRIs Sent: 0 | last_node: https://api.deathwing.me
 ```
 
 **Usage**:
@@ -79,6 +80,12 @@ podping --hive-account <hive-account> --hive-posting-key <posting-key>
 2021-08-30T00:14:37-0500 | INFO | Transaction sent: c9cbaace76ec365052c11ec4a3726e4ed3a7c54d - JSON size: 170
 ```
 
+Adding a Medium and Reason:
+```
+podping --hive-account <hive-account> --hive-posting-key <posting-key> --no-dry-run --no-sanity-check write https://3speak.tv/rss/podping.xml --medium video --reason update
+```
+
+
 Or add `--dry-run` to test functionality without broadcasting:
 ```
 podping --hive-account <hive-account> --hive-posting-key <posting-key> --dry-run --no-sanity-check write https://www.example.com/feed.xml
@@ -101,4 +108,6 @@ $ podping write [OPTIONS] IRI...
 
 **Options**:
 
+* `--medium TEXT`: The medium of the feed being updated. Must be one of the following: audiobook music podcast video newsletter blog film [env var: PODPING_MEDIUM; default: podcast]
+* `--reason TEXT`: The reason the feed is being updated. Must be one of the following: update live [env var: PODPING_REASON; default: update]
 * `--help`: Show this message and exit.
diff --git a/build.py b/build.py
index b217ea1..7a07f82 100644
--- a/build.py
+++ b/build.py
@@ -7,7 +7,9 @@ def __init__(self, d):
 
     @property
     def capnpy_options(self):
-        return {}
+        return {
+            "convert_case": False,  # do NOT convert camelCase to camel_case
+        }
 
     @property
     def ext_modules(self):
diff --git a/poetry.lock b/poetry.lock
index 6e812e3..c7ebfc4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,6 +1,6 @@
 [[package]]
 name = "anyio"
-version = "3.4.0"
+version = "3.5.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 category = "main"
 optional = false
@@ -9,10 +9,9 @@ python-versions = ">=3.6.2"
 [package.dependencies]
 idna = ">=2.8"
 sniffio = ">=1.1"
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
 
 [package.extras]
-doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
+doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
 test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
 trio = ["trio (>=0.16)"]
 
@@ -24,9 +23,6 @@ category = "main"
 optional = false
 python-versions = ">=3.6"
 
-[package.dependencies]
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
-
 [package.extras]
 tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"]
 
@@ -88,7 +84,6 @@ mypy-extensions = ">=0.4.3"
 pathspec = ">=0.9.0,<1"
 platformdirs = ">=2"
 tomli = ">=0.2.6,<2.0.0"
-typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
 typing-extensions = [
     {version = ">=3.10.0.0", markers = "python_version < \"3.10\""},
     {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""},
@@ -227,7 +222,6 @@ optional = false
 python-versions = ">=3.6"
 
 [package.dependencies]
-importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""}
 mccabe = ">=0.6.0,<0.7.0"
 pycodestyle = ">=2.8.0,<2.9.0"
 pyflakes = ">=2.4.0,<2.5.0"
@@ -253,7 +247,6 @@ python-versions = ">=3.7"
 
 [package.dependencies]
 gitdb = ">=4.0.1,<5"
">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} [[package]] name = "h11" @@ -308,22 +301,6 @@ category = "main" optional = false python-versions = ">=3.5" -[[package]] -name = "importlib-metadata" -version = "3.10.1" -description = "Read metadata from Python packages" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] - [[package]] name = "iniconfig" version = "1.1.1" @@ -348,7 +325,7 @@ plugins = ["setuptools"] [[package]] name = "lighthive" -version = "0.3.1" +version = "0.3.2" description = "A light python client to interact with the HIVE blockchain" category = "main" optional = false @@ -372,6 +349,23 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "mypy" +version = "0.931" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = ">=1.1.0" +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -427,9 +421,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -515,7 +506,6 @@ python-versions = ">=3.6" atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -660,9 +650,6 @@ category = "main" optional = false python-versions = ">=3.6,<4.0" -[package.dependencies] -importlib_metadata = {version = ">=3.0,<4.0", markers = "python_version < \"3.8\""} - [[package]] name = "six" version = "1.16.0" @@ -696,7 +683,6 @@ optional = false python-versions = ">=3.6" [package.dependencies] -importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] @@ -715,14 +701,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[[package]] -name = "typed-ast" -version = "1.5.1" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "typer" version = "0.3.2" @@ -776,30 +754,18 @@ brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -[[package]] -name = "zipp" -version = "3.7.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", 
"pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] - [extras] server = ["pyzmq"] [metadata] lock-version = "1.1" -python-versions = "^3.7" -content-hash = "5e94fb1b97e69d5f2b1ea51063fdaf8c47efed7327db6b07b1bc2b929faf4427" +python-versions = "^3.8" +content-hash = "78cbdf0cd2c7cd9eef4a81d9709aed3cbc2e9b8dad00a111a7072fd928aeea18" [metadata.files] anyio = [ - {file = "anyio-3.4.0-py3-none-any.whl", hash = "sha256:2855a9423524abcdd652d942f8932fda1735210f77a6b392eafd9ff34d3fe020"}, - {file = "anyio-3.4.0.tar.gz", hash = "sha256:24adc69309fb5779bc1e06158e143e0b6d2c56b302a3ac3de3083c705a6ed39d"}, + {file = "anyio-3.5.0-py3-none-any.whl", hash = "sha256:b5fa16c5ff93fa1046f2eeb5bbff2dad4d3514d6cda61d02816dba34fa8c3c2e"}, + {file = "anyio-3.5.0.tar.gz", hash = "sha256:a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6"}, ] asgiref = [ {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"}, @@ -1037,10 +1003,6 @@ idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -importlib-metadata = [ - {file = "importlib_metadata-3.10.1-py3-none-any.whl", hash = "sha256:2ec0faae539743ae6aaa84b49a169670a465f7f5d64e6add98388cc29fd1f2f6"}, - {file = "importlib_metadata-3.10.1.tar.gz", hash = "sha256:c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1"}, -] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1050,12 +1012,34 @@ isort = [ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] lighthive = [ - {file = "lighthive-0.3.1.tar.gz", hash = "sha256:161fdae5fed38ba79f13b8438fd1e04379f19fd580c1976164c030b3d5569ecd"}, + {file = "lighthive-0.3.2.tar.gz", hash = "sha256:9986b3ccfcfcaddf3e3076f1846f1b119dc6dfd891c59ff436d3737ca7520f38"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] +mypy = [ + {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, + {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, + {file = "mypy-0.931-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:300717a07ad09525401a508ef5d105e6b56646f7942eb92715a1c8d610149714"}, + {file = "mypy-0.931-cp310-cp310-win_amd64.whl", hash = "sha256:7b3f6f557ba4afc7f2ce6d3215d5db279bcf120b3cfd0add20a5d4f4abdae5bc"}, + {file = "mypy-0.931-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1bf752559797c897cdd2c65f7b60c2b6969ffe458417b8d947b8340cc9cec08d"}, + {file = "mypy-0.931-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4365c60266b95a3f216a3047f1d8e3f895da6c7402e9e1ddfab96393122cc58d"}, + {file = "mypy-0.931-cp36-cp36m-win_amd64.whl", hash = 
"sha256:1b65714dc296a7991000b6ee59a35b3f550e0073411ac9d3202f6516621ba66c"}, + {file = "mypy-0.931-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e839191b8da5b4e5d805f940537efcaa13ea5dd98418f06dc585d2891d228cf0"}, + {file = "mypy-0.931-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:50c7346a46dc76a4ed88f3277d4959de8a2bd0a0fa47fa87a4cde36fe247ac05"}, + {file = "mypy-0.931-cp37-cp37m-win_amd64.whl", hash = "sha256:d8f1ff62f7a879c9fe5917b3f9eb93a79b78aad47b533911b853a757223f72e7"}, + {file = "mypy-0.931-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9fe20d0872b26c4bba1c1be02c5340de1019530302cf2dcc85c7f9fc3252ae0"}, + {file = "mypy-0.931-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1b06268df7eb53a8feea99cbfff77a6e2b205e70bf31743e786678ef87ee8069"}, + {file = "mypy-0.931-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8c11003aaeaf7cc2d0f1bc101c1cc9454ec4cc9cb825aef3cafff8a5fdf4c799"}, + {file = "mypy-0.931-cp38-cp38-win_amd64.whl", hash = "sha256:d9d2b84b2007cea426e327d2483238f040c49405a6bf4074f605f0156c91a47a"}, + {file = "mypy-0.931-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ff3bf387c14c805ab1388185dd22d6b210824e164d4bb324b195ff34e322d166"}, + {file = "mypy-0.931-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b56154f8c09427bae082b32275a21f500b24d93c88d69a5e82f3978018a0266"}, + {file = "mypy-0.931-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ca7f8c4b1584d63c9a0f827c37ba7a47226c19a23a753d52e5b5eddb201afcd"}, + {file = "mypy-0.931-cp39-cp39-win_amd64.whl", hash = "sha256:74f7eccbfd436abe9c352ad9fb65872cc0f1f0a868e9d9c44db0893440f0c697"}, + {file = "mypy-0.931-py3-none-any.whl", hash = "sha256:1171f2e0859cfff2d366da2c7092b06130f232c636a3f7301e3feb8b41f6377d"}, + {file = "mypy-0.931.tar.gz", hash = "sha256:0038b21890867793581e4cb0d810829f5fd4441aa75796b53033af3aa30430ce"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -1292,27 +1276,6 @@ tomli = [ {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, ] -typed-ast = [ - {file = "typed_ast-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d8314c92414ce7481eee7ad42b353943679cf6f30237b5ecbf7d835519e1212"}, - {file = "typed_ast-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b53ae5de5500529c76225d18eeb060efbcec90ad5e030713fe8dab0fb4531631"}, - {file = "typed_ast-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:24058827d8f5d633f97223f5148a7d22628099a3d2efe06654ce872f46f07cdb"}, - {file = "typed_ast-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a6d495c1ef572519a7bac9534dbf6d94c40e5b6a608ef41136133377bba4aa08"}, - {file = "typed_ast-1.5.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:de4ecae89c7d8b56169473e08f6bfd2df7f95015591f43126e4ea7865928677e"}, - {file = "typed_ast-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:256115a5bc7ea9e665c6314ed6671ee2c08ca380f9d5f130bd4d2c1f5848d695"}, - {file = "typed_ast-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:7c42707ab981b6cf4b73490c16e9d17fcd5227039720ca14abe415d39a173a30"}, - {file = "typed_ast-1.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:71dcda943a471d826ea930dd449ac7e76db7be778fcd722deb63642bab32ea3f"}, - {file = "typed_ast-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4f30a2bcd8e68adbb791ce1567fdb897357506f7ea6716f6bbdd3053ac4d9471"}, - {file = "typed_ast-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca9e8300d8ba0b66d140820cf463438c8e7b4cdc6fd710c059bfcfb1531d03fb"}, - {file = "typed_ast-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9caaf2b440efb39ecbc45e2fabde809cbe56272719131a6318fd9bf08b58e2cb"}, - {file = "typed_ast-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9bcad65d66d594bffab8575f39420fe0ee96f66e23c4d927ebb4e24354ec1af"}, - {file = "typed_ast-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:591bc04e507595887160ed7aa8d6785867fb86c5793911be79ccede61ae96f4d"}, - {file = "typed_ast-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:a80d84f535642420dd17e16ae25bb46c7f4c16ee231105e7f3eb43976a89670a"}, - {file = "typed_ast-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:38cf5c642fa808300bae1281460d4f9b7617cf864d4e383054a5ef336e344d32"}, - {file = "typed_ast-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b6ab14c56bc9c7e3c30228a0a0b54b915b1579613f6e463ba6f4eb1382e7fd4"}, - {file = "typed_ast-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2b8d7007f6280e36fa42652df47087ac7b0a7d7f09f9468f07792ba646aac2d"}, - {file = "typed_ast-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:b6d17f37f6edd879141e64a5db17b67488cfeffeedad8c5cec0392305e9bc775"}, - {file = "typed_ast-1.5.1.tar.gz", hash = "sha256:484137cab8ecf47e137260daa20bafbba5f4e3ec7fda1c1e69ab299b75fa81c5"}, -] typer = [ {file = "typer-0.3.2-py3-none-any.whl", hash = "sha256:ba58b920ce851b12a2d790143009fa00ac1d05b3ff3257061ff69dbdfc3d161b"}, {file = "typer-0.3.2.tar.gz", hash = "sha256:5455d750122cff96745b0dec87368f56d023725a7ebc9d2e54dd23dc86816303"}, @@ -1329,7 +1292,3 @@ urllib3 = [ {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, ] -zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, -] diff --git a/pyproject.toml b/pyproject.toml index 666daab..2bd876f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "podping-hivewriter" -version = "1.1.0-alpha.0" +version = "1.1.0-beta.0" license = "MIT" authors = ["Alecks Gates ", "Brian of London "] maintainers = ["Alecks Gates ", "Brian of London "] @@ -21,7 +21,7 @@ build = "build.py" include = ["src/podping_hivewriter/schema/*.py", "src/podping_hivewriter/schema/*.so"] [tool.poetry.dependencies] -python = "^3.7" +python = "^3.8" pyzmq = "^22.1.0" cffi = "^1.14.5" pydantic = "^1.9.0" @@ -42,6 +42,7 @@ bandit = "^1.7.0" pytest-timeout = "^2.0.2" typer-cli = "^0.0.12" flake8 = 
"^4.0.1" +mypy = "^0.931" [tool.poetry.extras] server = ["pyzmq"] @@ -55,7 +56,7 @@ requires = ["poetry-core>=1.0.0", "capnpy"] build-backend = "poetry.core.masonry.api" [tool.black] -target-version = ['py37', 'py38', 'py39', 'py310'] +target-version = ['py38', 'py39', 'py310'] [tool.isort] profile = "black" diff --git a/setup.py b/setup.py index 00e0b9d..a1f4308 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ setup_kwargs = { "name": "podping-hivewriter", - "version": "1.1.0-alpha.0", + "version": "1.1.0-beta.0", "description": "This is a tool used to submit RFC 3987-compliant International Resource Identifiers as a Podping notification on the Hive blockchain.", "long_description": "# podping-hivewriter\nThe Hive writer component of podping. You will need a Hive account, see section [Hive account and Authorization](#hive-account) below.\n\n## CLI Install\n\nThe following have been tested on Linux and macOS. However, Windows should work also. If you have issues on Windows we highly recommend the [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/) and/or Docker.\n\n### Using [pipx](https://pypa.github.io/pipx/) (preferred over pip)\n```shell\npipx install podping-hivewriter\n```\n\n### Using pip\n```shell\npip install --user podping-hivewriter\n```\n\n### Installing the server\n\nIf you'd like to install the server component, it's hidden behind the extra flag `server`. This is to make it easier to install only the `write` CLI component `podping-hivewriter` on non-standard systems without a configured development enviornment.\n\n```shell\npipx install podping-hivewriter[server]\n```\n\nMake sure you have `~/.local/bin/` on your `PATH`.\n\nSee the dedicated [CLI docs](CLI.md) for more information.\n\n## Container\n\nThe container images are hosted on [Docker Hub](https://hub.docker.com/r/podcastindexorg/podping-hivewriter). Images are currently based on Debian bullseye-based Python 3.9 with the following architectures: `amd64`, `i386`, `arm64`, `armv7`, `armv6`\n\n### docker-compose\n\n```yaml\nversion: '2.0'\nservices:\n podping-hivewriter:\n image: podcastindexorg/podping-hivewriter\n restart: always\n ports:\n - \"9999:9999\"\n environment:\n - PODPING_HIVE_ACCOUNT=\n - PODPING_HIVE_POSTING_KEY=\n - PODPING_LISTEN_IP=0.0.0.0\n - PODPING_LISTEN_PORT=9999\n - PODPING_LIVETEST=false\n - PODPING_DRY_RUN=false\n - PODPING_STATUS=true\n - PODPING_IGNORE_CONFIG_UPDATES=false\n - PODPING_I_KNOW_WHAT_IM_DOING=false\n - PODPING_DEBUG=false\n```\n\nAssuming you just copy-pasted without reading, the above will fail at first. As noted in the [server command documentation](https://github.com/Podcastindex-org/podping-hivewriter/blob/main/CLI.md#podping-server):\n\n>WARNING: DO NOT run this on a publicly accessible host. There currently is NO authentication required to submit to the server. Set to * or 0.0.0.0 for all interfaces.\n\nAs all Docker installations vary, we set `0.0.0.0` as the listen IP for connectivity. This doesn't affect the IP address docker listens on when we tell it to pass port `9999` through to the container. 
If you understand the consequences of this, set `PODPING_I_KNOW_WHAT_IM_DOING` to `true`.\n\n### Building the image with Docker\n\nLocally build the podping-hivewriter container with a \"develop\" tag\n\n```shell\ndocker build -t podping-hivewriter:develop .\n```\n\n\n### Running the image\n\nRun the locally built image in a container, passing local port 9999 to port 9999 in the container.\nENV variables can be passed to docker with `--env-file` option after modifying the `.env.EXAMPLE` file and renaming it to `.env`\n\n```shell\ndocker run --rm -p 9999:9999 --env-file .env --name podping podping-hivewriter:develop\n```\n\nRunning with command line options, like `--dry-run` for example, add them with the full podping command.\nSettings can also be passed with the `-e` option for Docker. Note, we leave out `-p 9999:9999` here because we're not running the server.\n\n```shell\ndocker run --rm \\\n -e PODPING_HIVE_ACCOUNT= \\\n -e PODPING_HIVE_POSTING_KEY= \\\n podping-hivewriter:develop \\\n podping --dry-run write https://www.example.com/feed.xml\n```\n\nAs another example for running a server, to run in *detached* mode, note the `-d` in the `docker run` options. Also note that `client` or `server` must come *after* the command line options for `podping`:\n```shell\ndocker run --rm -d \\\n -p 9999:9999 --env-file .env \\\n --name podping podping-hivewriter:develop \\\n podping --livetest server\n```\n\nOne running you can view and follow the live output with:\n```shell\ndocker logs podping -f\n```\n\nSee the [CLI docs](https://github.com/Podcastindex-org/podping-hivewriter/blob/main/CLI.md) for default values.\n\n## Development\n\nYou'll need a few extras:\n\n1. [capnproto](https://capnproto.org/). On a Mac: `brew instal capnp`\n2. [Poetry](https://python-poetry.org/docs/)\n\n\nWe use [poetry](https://python-poetry.org/) for dependency management. Once you have it, clone this repo and run:\n\n```shell\npoetry install\n```\n\nThen to switch to the virtual environment, use:\n\n```shell\npoetry shell\n```\nMake sure you have a the environment variables `PODPING_HIVE_ACCOUNT` and `PODPING_HIVE_POSTING_KEY` set.\n\nAfter that you should be able to run the `podping` command or run the tests:\n\n```shell\npytest\n```\n\nTo run all tests, make sure to set the necessary environment variables for your Hive account. This can take many minutes:\n\n```shell\npytest --runslow\n```\n\n## Hive account\n\nIf you need a Hive account, please download the [Hive Keychain extension for your browser](https://hive-keychain.com/) then use this link to get your account from [https://HiveOnboard.com?ref=podping](https://hiveonboard.com?ref=podping). You will need at least 20 Hive Power \"powered up\" to get started (worth around $10). Please contact [@brianoflondon](https://peakd.com/@brianoflondon) brian@podping.org if you need assistance getting set up.\n\nIf you use the [Hiveonboard]((https://hiveonboard.com?ref=podping)) link `podping` will **delegate** enough Hive Power to get you started.\n\n### Permissions and Authorization\n\nYou don't need permission, but you do need to tell `podping` that you want to send valid `podpings`:\n\n- Hive is a so-called \"permissionless\" blockchain. 
Once you have a Hive Account and a minimal amount of Hive Power, that account can post to Hive, including sending `podpings`.\n\n- Nobody can block any valid Hive Account from sending and nobody can help you if you lose your keys.\n\n- Whilst anyone can post `podpings` to Hive, there is a need to register your Hive Accountname for those `podpings` to be recognized by all clients. This is merely a spam-prevention measure and clients may choose to ignore it.\n\n- Please contact new@podping.org or send a Hive Transfer to [@podping](https://peakd.com/@podping) to have your account validated.\n\n- Side note on keys: `podping` uses the `posting-key` which is the lowest value of the four Hive keys (`owner`, `active`, `memo`, `posting` and there is usually a `master password` which can generate all the keys). That is not to say that losing control of it is a good idea, but that key is not authorized to make financially important transfers. It can, however, post public information so should be treated carefully and kept secure.\n\nFor a [comprehensive explanation of Hive and Podping, please see this post](https://peakd.com/podping/@brianoflondon/podping-and-podcasting-20-funding-to-put-hive-at-the-center-of-global-podcasting-infrastructure).", "author": "Alecks Gates", @@ -43,7 +43,7 @@ "install_requires": install_requires, "extras_require": extras_require, "entry_points": entry_points, - "python_requires": ">=3.7,<4.0", + "python_requires": ">=3.8,<4.0", } from build import * diff --git a/src/podping_hivewriter/cli/podping.py b/src/podping_hivewriter/cli/podping.py index c8a66ef..196f568 100644 --- a/src/podping_hivewriter/cli/podping.py +++ b/src/podping_hivewriter/cli/podping.py @@ -1,16 +1,52 @@ import asyncio import logging -from typing import Optional, List +import sys +from typing import List, Optional import rfc3987 import typer +from lighthive.broadcast.base58 import Base58 +from lighthive.broadcast.key_objects import PrivateKey from podping_hivewriter import __version__ -from podping_hivewriter.constants import LIVETEST_OPERATION_ID, PODPING_OPERATION_ID +from podping_hivewriter.constants import ( + LIVETEST_OPERATION_ID, + PODPING_OPERATION_ID, + STARTUP_FAILED_INVALID_ACCOUNT, + STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE, +) +from podping_hivewriter.hive import get_client +from podping_hivewriter.models.medium import Medium, mediums, str_medium_map +from podping_hivewriter.models.reason import Reason, reasons, str_reason_map from podping_hivewriter.podping_hivewriter import PodpingHivewriter from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +def is_base58(sb: str) -> bool: + try: + _ = Base58(sb) + return True + + except Exception: + return False + + +def medium_callback(medium: str) -> str: + if medium not in mediums: + raise typer.BadParameter( + f"Medium be one of the following: {str(', '.join(mediums))}" + ) + return medium + + +def reason_callback(reason: str) -> str: + if reason not in reasons: + raise typer.BadParameter( + f"Reason must be one of the following: {str(', '.join(reasons))}" + ) + return reason + + def iris_callback(iris: List[str]) -> List[str]: for iri in iris: if not rfc3987.match(iri, "IRI"): @@ -52,6 +88,20 @@ def exit_cli(_): @app.command() def write( + medium: str = typer.Option( + str(Medium.podcast), + envvar=["PODPING_MEDIUM"], + callback=medium_callback, + autocompletion=lambda: list(mediums), + help=f"The medium of the feed being updated. 
Must be one of the following: {str(' '.join(mediums))}", + ), + reason: str = typer.Option( + str(Reason.update), + envvar=["PODPING_REASON"], + callback=reason_callback, + autocompletion=lambda: list(reasons), + help=f"The reason the feed is being updated. Must be one of the following: {str(' '.join(reasons))}", + ), iris: List[str] = typer.Argument( ..., metavar="IRI...", @@ -75,6 +125,12 @@ def write( 2021-08-30T00:14:37-0500 | INFO | Transaction sent: c9cbaace76ec365052c11ec4a3726e4ed3a7c54d - JSON size: 170 ``` + Adding a Medium and Reason: + ``` + podping --hive-account --hive-posting-key --no-dry-run --no-sanity-check write https://3speak.tv/rss/podping.xml --medium video --reason update + ``` + + Or add `--dry-run` to test functionality without broadcasting: ``` podping --hive-account --hive-posting-key --dry-run --no-sanity-check write https://www.example.com/feed.xml @@ -96,7 +152,9 @@ def write( daemon=False, dry_run=Config.dry_run, ) as podping_hivewriter: - coro = podping_hivewriter.failure_retry(set(iris)) + coro = podping_hivewriter.failure_retry( + set(iris), medium=str_medium_map[medium], reason=str_reason_map[reason] + ) try: # Try to get an existing loop in case of running from other program # Mostly used for pytest @@ -138,18 +196,19 @@ def server( ``` podping --hive-account --hive-posting-key server - 2021-08-30T00:38:58-0500 | INFO | podping 1.0.0a0 starting up in server mode - 2021-08-30T00:39:00-0500 | INFO | Podping startup sequence initiated, please stand by, full bozo checks in operation... - 2021-08-30T00:39:01-0500 | INFO | Testing Account Resource Credits - before 24.88% - 2021-08-30T00:39:02-0500 | INFO | Transaction sent: 39c2a396784ba6ba498cee3055900442953bb13f - JSON size: 204 - 2021-08-30T00:39:02-0500 | INFO | Testing Account Resource Credits.... 5s - 2021-08-30T00:39:17-0500 | INFO | Testing Account Resource Credits - after 24.52% - 2021-08-30T00:39:17-0500 | INFO | Capacity for further podpings : 68.5 - 2021-08-30T00:39:19-0500 | INFO | Transaction sent: 39405eaf4a522deb2d965fc9bd8c6b92dca44786 - JSON size: 231 - 2021-08-30T00:39:19-0500 | INFO | Startup of Podping status: SUCCESS! Hit the BOOST Button. - 2021-08-30T00:39:19-0500 | INFO | Hive account: @podping.test - 2021-08-30T00:39:19-0500 | INFO | Running ZeroMQ server on 127.0.0.1:9999 - 2021-08-30T00:39:19-0500 | INFO | Status - Hive Node: - Uptime: 0:00:20.175997 - IRIs Received: 0 - IRIs Deduped: 0 - IRIs Sent: 0 + 2022-01-17T13:16:43+0200 | INFO | podping 1.1.0a1 starting up in server mode + 2022-01-17T13:16:44+0200 | INFO | Podping startup sequence initiated, please stand by, full bozo checks in operation... + 2022-01-17T13:16:45+0200 | INFO | Testing Account Resource Credits - before 99.73% + 2022-01-17T13:16:48+0200 | INFO | Calculating Account Resource Credits for 100 pings: 8.55% | Capacity: 1,169 + 2022-01-17T13:16:49+0200 | INFO | Configuration override from Podping Hive: hive_operation_period=30 max_url_list_bytes=8000 diagnostic_report_period=180 control_account='podping' control_account_check_period=180 test_nodes=('https://testnet.openhive.network',) + 2022-01-17T13:16:51+0200 | INFO | Lighthive Node: https://api.hive.blog + 2022-01-17T13:16:51+0200 | INFO | JSON size: 179 + 2022-01-17T13:16:51+0200 | INFO | Startup of Podping status: SUCCESS! Hit the BOOST Button. 
+ 2022-01-17T13:16:53+0200 | INFO | Lighthive Fastest: https://api.deathwing.me + 2022-01-17T13:16:53+0200 | INFO | Hive account: @podping.bol + 2022-01-17T13:16:53+0200 | INFO | Running ZeroMQ server on 127.0.0.1:9999 + 2022-01-17T13:16:54+0200 | INFO | Lighthive Fastest: https://api.deathwing.me + 2022-01-17T13:16:54+0200 | INFO | Status - Uptime: 0:00:10 | IRIs Received: 0 | IRIs Deduped: 0 | IRIs Sent: 0 | last_node: https://api.deathwing.me ``` """ @@ -285,6 +344,47 @@ def callback( else: Config.operation_id = PODPING_OPERATION_ID + # Check the account exists + posting_keys = [hive_posting_key] + client = get_client(posting_keys=posting_keys) + account_exists = client.get_accounts([hive_account]) + if not account_exists: + logging.error( + f"Hive account @{hive_account} does not exist, " + f"check ENV vars and try again" + ) + logging.error("Exiting") + sys.exit(STARTUP_FAILED_INVALID_ACCOUNT) + + if not is_base58(hive_posting_key): + logging.error("Startup of Podping status: FAILED!") + logging.error( + "Posting Key not valid Base58 - check ENV vars and try again", + ) + logging.error("Exiting") + sys.exit(STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE) + + account = client.account(hive_account) + public_keys = [a[0] for a in account.raw_data["posting"]["key_auths"]] + try: + private_key = PrivateKey(hive_posting_key) + if not str(private_key.pubkey) in public_keys: + logging.error("Startup of Podping status: FAILED!") + logging.error( + f"Posting Key doesn't match @{hive_account} - " + f"check ENV vars and try again", + ) + logging.error("Exiting") + sys.exit(STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE) + except Exception: + logging.error("Startup of Podping status: FAILED!") + logging.error( + f"Some other error with keys for @{hive_account} - " + f"check ENV vars and try again", + ) + logging.error("Exiting") + sys.exit(STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE) + if __name__ == "__main__": app() diff --git a/src/podping_hivewriter/constants.py b/src/podping_hivewriter/constants.py index 1986401..d63429a 100644 --- a/src/podping_hivewriter/constants.py +++ b/src/podping_hivewriter/constants.py @@ -1,11 +1,11 @@ LIVETEST_OPERATION_ID = "pplt" PODPING_OPERATION_ID = "pp" -STARTUP_OPERATION_ID = "ppst" -CURRENT_PODPING_VERSION = "0.3" +STARTUP_OPERATION_ID = "_startup" STARTUP_FAILED_UNKNOWN_EXIT_CODE = 10 STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE = 20 STARTUP_FAILED_HIVE_API_ERROR_EXIT_CODE = 30 +STARTUP_FAILED_INVALID_ACCOUNT = 40 PODPING_SETTINGS_KEY = "podping-settings" # Operation JSON must be less than or equal to 8192 bytes. 
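The startup validation added to `cli/podping.py` above combines three checks: the account must exist on chain, the posting key must parse as Base58, and the public key derived from that key must appear among the account's posting `key_auths`. A minimal sketch of the same flow as a standalone helper, using only the lighthive calls the diff itself relies on (the function name `posting_key_matches` is illustrative, not part of the codebase):

```python
from lighthive.broadcast.base58 import Base58
from lighthive.broadcast.key_objects import PrivateKey
from lighthive.client import Client


def posting_key_matches(account_name: str, posting_key: str) -> bool:
    """Sketch: validate a Hive posting key the way the new CLI callback does."""
    try:
        Base58(posting_key)  # malformed Base58 raises here
    except Exception:
        return False
    account = Client().account(account_name)
    # key_auths is a list of (public_key, weight) pairs on the account
    public_keys = [auth[0] for auth in account.raw_data["posting"]["key_auths"]]
    try:
        return str(PrivateKey(posting_key).pubkey) in public_keys
    except Exception:
        return False
```

Failing fast here means a bad key exits at startup with `STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE` instead of surfacing later as the `tx_missing_posting_auth` RPC error that `failure_retry` now also terminates on.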
diff --git a/src/podping_hivewriter/hive.py b/src/podping_hivewriter/hive.py
index 01d55ff..4cb2e0d 100644
--- a/src/podping_hivewriter/hive.py
+++ b/src/podping_hivewriter/hive.py
@@ -6,7 +6,6 @@
 
 def get_client(
     posting_keys: Optional[List[str]] = None,
-    nobroadcast: Optional[bool] = False,
     nodes=None,
     connect_timeout=3,
     read_timeout=30,
@@ -35,7 +34,7 @@ def get_allowed_accounts(
     and only react to these accounts"""
 
     if not client:
-        client = Client()
+        client = get_client()
 
     master_account = client.account(account_name)
     return set(master_account.following())
diff --git a/src/podping_hivewriter/models/medium.py b/src/podping_hivewriter/models/medium.py
index 9ed0e63..b16dcf2 100644
--- a/src/podping_hivewriter/models/medium.py
+++ b/src/podping_hivewriter/models/medium.py
@@ -1,3 +1,22 @@
 import capnpy
+from capnpy.annotate import Options
 
-Medium = capnpy.load_schema("podping_hivewriter.schema.medium").Medium
+medium_module = capnpy.load_schema(
+    "podping_hivewriter.schema.medium",
+    # Make sure properties are imported as specified (camelCase)
+    options=Options(convert_case=False),
+)
+
+Medium = medium_module.Medium
+
+mediums = frozenset(Medium.__members__)
+
+# capnpy has a different "constructor" for pyx vs pure python
+get_medium_by_num = Medium._new_hack if hasattr(Medium, "_new_hack") else Medium._new
+
+str_medium_map = {
+    enumerant.name.decode("UTF-8"): get_medium_by_num(enumerant.codeOrder)
+    for enumerant in capnpy.get_reflection_data(medium_module)
+    .get_node(Medium)
+    .get_enum_enumerants()
+}
diff --git a/src/podping_hivewriter/models/podping.py b/src/podping_hivewriter/models/podping.py
new file mode 100644
index 0000000..e631aa3
--- /dev/null
+++ b/src/podping_hivewriter/models/podping.py
@@ -0,0 +1,37 @@
+from typing import List, Literal
+
+from pydantic import BaseModel, validator
+
+from podping_hivewriter.models.medium import mediums
+from podping_hivewriter.models.reason import reasons
+
+
+class Podping(BaseModel):
+    """Dataclass for on-chain podping schema"""
+
+    version: Literal["1.0"] = "1.0"
+    medium: str
+    reason: str
+    iris: List[str]
+
+    @validator("medium")
+    def medium_exists(cls, v):
+        """Make sure the given medium matches what's available"""
+        if v not in mediums:
+            raise ValueError(f"medium must be one of {str(', '.join(mediums))}")
+        return v
+
+    @validator("reason")
+    def reason_exists(cls, v):
+        """Make sure the given reason matches what's available"""
+        if v not in reasons:
+            raise ValueError(f"reason must be one of {str(', '.join(reasons))}")
+        return v
+
+    @validator("iris")
+    def iris_at_least_one_element(cls, v):
+        """Make sure the list contains at least one element"""
+        if len(v) == 0:
+            raise ValueError("iris must contain at least one element")
+
+        return v
diff --git a/src/podping_hivewriter/models/reason.py b/src/podping_hivewriter/models/reason.py
index 170f7ae..6ac2d9f 100644
--- a/src/podping_hivewriter/models/reason.py
+++ b/src/podping_hivewriter/models/reason.py
@@ -1,3 +1,22 @@
 import capnpy
+from capnpy.annotate import Options
 
-Reason = capnpy.load_schema("podping_hivewriter.schema.reason").Reason
+reason_module = capnpy.load_schema(
+    "podping_hivewriter.schema.reason",
+    # Make sure properties are imported as specified (camelCase)
+    options=Options(convert_case=False, include_reflection_data=True),
+)
+
+Reason = reason_module.Reason
+
+reasons = frozenset(Reason.__members__)
+
+# capnpy has a different "constructor" for pyx vs pure python
+get_reason_by_num = Reason._new_hack if hasattr(Reason, "_new_hack") else Reason._new
+
+str_reason_map = {
+    enumerant.name.decode("UTF-8"): get_reason_by_num(enumerant.codeOrder)
+    for enumerant in capnpy.get_reflection_data(reason_module)
+    .get_node(Reason)
+    .get_enum_enumerants()
+}
diff --git a/src/podping_hivewriter/podping_hivewriter.py b/src/podping_hivewriter/podping_hivewriter.py
index 9f2600d..598f49d 100644
--- a/src/podping_hivewriter/podping_hivewriter.py
+++ b/src/podping_hivewriter/podping_hivewriter.py
@@ -4,21 +4,22 @@
 import re
 import sys
 import uuid
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta
 from itertools import cycle
 from timeit import default_timer as timer
-from typing import List, Set, Tuple
+from typing import List, Set, Tuple, Union
 
 import rfc3987
 from lighthive.datastructures import Operation
 from lighthive.exceptions import RPCNodeException
 from lighthive.node_picker import compare_nodes
 
+from podping_hivewriter import __version__ as podping_hivewriter_version
 from podping_hivewriter.async_context import AsyncContext
 from podping_hivewriter.async_wrapper import sync_to_async
 from podping_hivewriter.constants import (
-    CURRENT_PODPING_VERSION,
     HIVE_CUSTOM_OP_DATA_MAX_LENGTH,
+    STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE,
     STARTUP_FAILED_UNKNOWN_EXIT_CODE,
     STARTUP_OPERATION_ID,
 )
@@ -26,22 +27,15 @@
     PodpingCustomJsonPayloadExceeded,
     TooManyCustomJsonsPerBlock,
 )
-from podping_hivewriter.hive import get_client, get_allowed_accounts
+from podping_hivewriter.hive import get_allowed_accounts, get_client
 from podping_hivewriter.models.hive_operation_id import HiveOperationId
 from podping_hivewriter.models.iri_batch import IRIBatch
 from podping_hivewriter.models.medium import Medium
+from podping_hivewriter.models.podping import Podping
 from podping_hivewriter.models.reason import Reason
 from podping_hivewriter.podping_settings_manager import PodpingSettingsManager
 
 
-def utc_date_str() -> str:
-    return datetime.utcnow().replace(tzinfo=timezone.utc).isoformat()
-
-
-def size_of_dict_as_json(payload: dict):
-    return len(json.dumps(payload, separators=(",", ":")).encode("UTF-8"))
-
-
 class PodpingHivewriter(AsyncContext):
     def __init__(
         self,
@@ -101,21 +95,13 @@ async def _startup(self):
             allowed = get_allowed_accounts(
                 self.lighthive_client, settings.control_account
             )
-            # Check the account exists
-            self.lighthive_client.account(self.server_account)
             # TODO: Should we periodically check if the account is allowed
             # and shut down if not?
             if self.server_account not in allowed:
                 logging.error(
                     f"Account @{self.server_account} not authorised to send Podpings"
                 )
-        except ValueError as ex:
-            logging.error(
-                f"Hive account @{self.server_account} does not exist, "
-                f"check ENV vars and try again",
-                exc_info=True,
-            )
-            raise ex
+
         except Exception as ex:
             logging.error(f"Unknown error occurred: {ex}", exc_info=True)
             raise ex
@@ -159,47 +145,55 @@ async def test_hive_resources(self):
                 f"Testing Account Resource Credits"
                 f' - before {manabar.get("last_mana_percent"):.2f}%'
             )
+            rc = self.lighthive_client.rc()
 
-            # TODO: See if anything depends on USE_TEST_NODE before removal
             custom_json = {
                 "server_account": self.server_account,
-                "USE_TEST_NODE": False,
                 "message": "Podping startup initiated",
                 "uuid": str(uuid.uuid4()),
                 "hive": str(self.lighthive_client.current_node),
             }
 
-            await self.send_notification(custom_json, STARTUP_OPERATION_ID)
-
-            settings = await self.settings_manager.get_settings()
-            logging.info(
-                f"Testing Account Resource Credits {settings.hive_operation_period}s"
-            )
-            await asyncio.sleep(settings.hive_operation_period)
+            startup_hive_operation_id = self.operation_id + STARTUP_OPERATION_ID
 
-            manabar_after = account.get_resource_credit_info()
+            op, size_of_json = await self.construct_operation(
+                custom_json, startup_hive_operation_id
+            )
+            rc_cost = rc.get_cost(op)
+            percent_after = (
+                100
+                * (manabar.get("last_mana") - (1e6 * rc_cost * 100))
+                / manabar["max_mana"]
+            )
+            percent_drop = manabar.get("last_mana_percent") - percent_after
+            capacity = (100 / percent_drop) * 100
             logging.info(
-                f"Testing Account Resource Credits"
-                f' - after {manabar_after.get("last_mana_percent"):.2f}%'
+                f"Calculating Account Resource Credits "
+                f"for 100 pings: {percent_drop:.2f}% | "
+                f"Capacity: {capacity:,.0f}"
             )
 
-            cost = manabar.get("current_mana") - manabar_after.get("current_mana")
-            if cost == 0:  # skip this test if we're going to get ZeroDivision
-                capacity = 1000000
-            else:
-                capacity = manabar_after.get("current_mana") / cost
-            logging.info(f"Capacity for further podpings : {capacity:.1f}")
-
-            custom_json["v"] = CURRENT_PODPING_VERSION
-            custom_json["capacity"] = f"{capacity:.1f}"
+            custom_json["v"] = podping_hivewriter_version
+            custom_json["capacity"] = f"{capacity:,.0f}"
             custom_json["message"] = "Podping startup complete"
             custom_json["hive"] = str(self.lighthive_client.current_node)
 
-            await self.send_notification(custom_json, STARTUP_OPERATION_ID)
+            await self.send_notification(custom_json, startup_hive_operation_id)
 
             logging.info("Startup of Podping status: SUCCESS! Hit the BOOST Button.")
 
-        except Exception:
+        except ValueError as ex:
+            if str(ex) == "Error loading Base58 object":
+                logging.error(
+                    f"Startup of Podping status: FAILED! {ex}",
+                    exc_info=True,
+                )
+                logging.error("Exiting")
+                sys.exit(STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE)
+
+        except Exception as ex:
             logging.error(
-                "Startup of Podping status: FAILED! Unknown error",
+                f"Startup of Podping status: FAILED! {ex}",
                 exc_info=True,
             )
             logging.error("Exiting")
@@ -228,7 +222,7 @@ async def _iri_batch_handler_loop(self):
                 iri_batch = await self.iri_batch_queue.get()
 
                 start = timer()
-                trx_id, failure_count = await self.failure_retry(iri_batch.iri_set)
+                failure_count = await self.failure_retry(iri_batch.iri_set)
                 duration = timer() - start
 
                 self.iri_batch_queue.task_done()
@@ -237,13 +231,16 @@
 
                 last_node = self.lighthive_client.current_node
                 logging.info(
-                    f"Batch send time: {duration:0.2f} - trx_id: {trx_id} - "
-                    f"Failures: {failure_count} - IRI batch_id {iri_batch.batch_id} - "
-                    f"IRIs in batch: {len(iri_batch.iri_set)} - "
+                    f"Batch send time: {duration:0.2f} | "
+                    f"Failures: {failure_count} - IRI batch_id {iri_batch.batch_id} | "
+                    f"IRIs in batch: {len(iri_batch.iri_set)} | "
                     f"last_node: {last_node}"
                 )
             except asyncio.CancelledError:
                 raise
+            except Exception as ex:
+                logging.error(f"{ex} occurred", exc_info=True)
+                raise
 
     async def _iri_batch_loop(self):
        async def get_from_queue():
@@ -347,101 +344,112 @@ async def num_operations_in_queue(self) -> int:
 
     async def output_hive_status(self) -> None:
         """Output the name of the current hive node on a regular basis"""
-        up_time = timedelta(seconds=timer() - self.startup_time)
+        up_time = timedelta(seconds=int(timer() - self.startup_time))
         await self.automatic_node_selection()
 
         last_node = self.lighthive_client.current_node
         logging.info(
-            f"Status - Uptime: {up_time} - "
-            f"IRIs Received: {self.total_iris_recv} - "
-            f"IRIs Deduped: {self.total_iris_recv_deduped} - "
-            f"IRIs Sent: {self.total_iris_sent} - "
+            f"Status - Uptime: {up_time} | "
+            f"IRIs Received: {self.total_iris_recv} | "
+            f"IRIs Deduped: {self.total_iris_recv_deduped} | "
+            f"IRIs Sent: {self.total_iris_sent} | "
            f"last_node: {last_node}"
         )
 
+    async def construct_operation(
+        self, payload: dict, hive_operation_id: Union[HiveOperationId, str]
+    ) -> Tuple[Operation, int]:
+        """Build the operation for the blockchain"""
+        payload_json = json.dumps(payload, separators=(",", ":"))
+        size_of_json = len(payload_json)
+        if size_of_json > HIVE_CUSTOM_OP_DATA_MAX_LENGTH:
+            raise PodpingCustomJsonPayloadExceeded("Max custom_json payload exceeded")
+
+        op = Operation(
+            "custom_json",
+            {
+                "required_auths": [],
+                "required_posting_auths": self.required_posting_auths,
+                "id": str(hive_operation_id),
+                "json": payload_json,
+            },
+        )
+        return op, size_of_json
+
     async def send_notification(
-        self, payload: dict, hive_operation_id: HiveOperationId
-    ) -> str:
+        self, payload: dict, hive_operation_id: Union[HiveOperationId, str]
+    ) -> None:
+        """Build and send an operation to the blockchain"""
         try:
-            size_of_json = size_of_dict_as_json(payload)
-            if size_of_json > HIVE_CUSTOM_OP_DATA_MAX_LENGTH:
-                raise PodpingCustomJsonPayloadExceeded(
-                    "Max custom_json payload exceeded"
-                )
-
-            op = Operation(
-                "custom_json",
-                {
-                    "required_auths": [],
-                    "required_posting_auths": self.required_posting_auths,
-                    "id": str(hive_operation_id),
-                    "json": json.dumps(payload),
-                },
+            op, size_of_json = await self.construct_operation(
+                payload, hive_operation_id
             )
+            # if you want to FORCE the error condition for >5 operations
+            # in one block, uncomment this line.
+            # op = [op] * 6
+
+            # Use asynchronous broadcast, which means we don't get the tx back;
+            # kinder to API servers
-            tx_new = await self._async_hive_broadcast(op=op, dry_run=self.dry_run)
-            tx_id = tx_new.get("id")
-            logging.info(f"Lighthive Node: {self.lighthive_client.current_node}")
-            logging.info(f"Transaction sent: {tx_id} - JSON size: {size_of_json}")
+            await self._async_hive_broadcast(op=op, dry_run=self.dry_run)
 
-            return tx_id
+            logging.info(f"Lighthive Node: {self.lighthive_client.current_node}")
+            logging.info(f"JSON size: {size_of_json}")
 
         except RPCNodeException as ex:
-            logging.error(f"{ex}")
-            if re.match(r"plugin exception.*custom json.*", str(ex)):
+            logging.error(f"send_notification error: {ex}")
+            if re.match(
+                r"plugin exception.*custom json.*", ex.raw_body["error"]["message"]
+            ):
                 self.lighthive_client.next_node()
                 raise TooManyCustomJsonsPerBlock()
             raise ex
 
+        except PodpingCustomJsonPayloadExceeded as ex:
+            raise ex
+
         except Exception as ex:
             logging.error(f"{ex}")
-            raise
+            raise ex
 
     async def send_notification_iri(
         self,
         iri: str,
         medium: Medium = Medium.podcast,
         reason: Reason = Reason.update,
-    ) -> str:
-        payload = {
-            "version": CURRENT_PODPING_VERSION,
-            "num_urls": 1,
-            "reason": str(reason),
-            "urls": [iri],
-        }
+    ) -> None:
+        payload = Podping(medium=medium, reason=reason, iris=[iri])
 
         hive_operation_id = HiveOperationId(self.operation_id, medium, reason)
 
-        return await self.send_notification(payload, hive_operation_id)
+        await self.send_notification(payload.dict(), hive_operation_id)
+
+        self.total_iris_sent += 1
 
     async def send_notification_iris(
         self,
         iris: Set[str],
         medium: Medium = Medium.podcast,
         reason: Reason = Reason.update,
-    ) -> str:
+    ) -> None:
         num_iris = len(iris)
-        payload = {
-            "version": CURRENT_PODPING_VERSION,
-            "num_urls": num_iris,
-            "reason": str(reason),
-            "urls": list(iris),
-        }
+        payload = Podping(medium=medium, reason=reason, iris=list(iris))
 
         hive_operation_id = HiveOperationId(self.operation_id, medium, reason)
 
-        tx_id = await self.send_notification(payload, hive_operation_id)
+        await self.send_notification(payload.dict(), hive_operation_id)
 
         self.total_iris_sent += num_iris
 
-        return tx_id
-
-    async def failure_retry(self, iri_set: Set[str]) -> Tuple[str, int]:
+    async def failure_retry(
+        self,
+        iri_set: Set[str],
+        medium: Medium = Medium.podcast,
+        reason: Reason = Reason.update,
+    ) -> int:
         await self.wait_startup()
         failure_count = 0
 
         while True:
-            # Sleep a maximum of 5 minutes, 2 additional seconds for every retry
+            # Sleep a maximum of 5 minutes, 3 additional seconds for every retry
            sleep_time = min(failure_count * 3, 300)
             if failure_count > 0:
                 logging.warning(f"Waiting {sleep_time}s before retry")
@@ -453,19 +461,33 @@ async def failure_retry(self, iri_set: Set[str]) -> Tuple[str, int]:
             logging.info(f"Received {len(iri_set)} IRIs")
 
             try:
-                trx_id = await self.send_notification_iris(iris=iri_set)
+                await self.send_notification_iris(
+                    iris=iri_set, medium=medium, reason=reason
+                )
                 if failure_count > 0:
                     logging.info(
                         f"FAILURE CLEARED after {failure_count} retries, {sleep_time}s"
                     )
-                return trx_id, failure_count
+                return failure_count
+            except RPCNodeException as ex:
+                logging.warning(f"{ex}")
+                logging.warning(f"Failed to send {len(iri_set)} IRIs")
+                if ex.raw_body["error"]["data"]["name"] == "tx_missing_posting_auth":
+                    for iri in iri_set:
+                        logging.error(iri)
+                    logging.error(
+                        f"Terminating: exit code: "
+                        f"{STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE}"
+                    )
+                    sys.exit(STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE)
+
             except Exception as ex:
ex: logging.warning(f"Failed to send {len(iri_set)} IRIs") - for iri in iri_set: - logging.warning(iri) - logging.error(f"{ex}") + logging.warning(f"{ex}") if logging.DEBUG >= logging.root.level: for iri in iri_set: logging.debug(iri) + + finally: self.lighthive_client.next_node() failure_count += 1 diff --git a/src/podping_hivewriter/podping_settings_manager.py b/src/podping_hivewriter/podping_settings_manager.py index dc19545..588518d 100644 --- a/src/podping_hivewriter/podping_settings_manager.py +++ b/src/podping_hivewriter/podping_settings_manager.py @@ -48,7 +48,7 @@ async def update_podping_settings(self) -> None: logging.warning(f"Problem with podping control settings: {e}") else: if self._settings != podping_settings: - logging.debug( + logging.info( f"Configuration override from Podping Hive: {podping_settings}" ) async with self._settings_lock: diff --git a/tests/integration/test_startup_checks_and_write_cli_single.py b/tests/integration/test_startup_checks_and_write_cli_single.py new file mode 100644 index 0000000..9adc762 --- /dev/null +++ b/tests/integration/test_startup_checks_and_write_cli_single.py @@ -0,0 +1,98 @@ +import asyncio +import json +import uuid +from platform import python_version as pv + +import pytest +from lighthive.client import Client +from lighthive.helpers.event_listener import EventListener +from typer.testing import CliRunner + +from podping_hivewriter.async_wrapper import sync_to_async +from podping_hivewriter.cli.podping import app +from podping_hivewriter.constants import ( + LIVETEST_OPERATION_ID, + STARTUP_FAILED_INVALID_ACCOUNT, + STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE, +) +from podping_hivewriter.models.hive_operation_id import HiveOperationId +from podping_hivewriter.models.medium import Medium +from podping_hivewriter.models.reason import Reason +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_startup_checks_and_write_cli_single(): + runner = CliRunner() + + settings_manager = PodpingSettingsManager(ignore_updates=True) + + client = Client() + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + test_name = "cli_single" + iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" + + default_hive_operation_id = HiveOperationId( + LIVETEST_OPERATION_ID, Medium.podcast, Reason.update + ) + default_hive_operation_id_str = str(default_hive_operation_id) + + async def get_iri_from_blockchain(start_block: int): + event_listener = EventListener(client, "head", start_block=start_block) + _on = sync_to_async(event_listener.on, thread_sensitive=False) + async for post in _on( + "custom_json", filter_by={"id": default_hive_operation_id_str} + ): + data = json.loads(post["op"][1]["json"]) + if "iris" in data and len(data["iris"]) == 1: + yield data["iris"][0] + + args = ["--livetest", "write", iri] + + current_block = client.get_dynamic_global_properties()["head_block_number"] + + # Ensure hive env vars are set from .env.test file or this will fail + result = runner.invoke(app, args) + + assert result.exit_code == 0 + + # Sleep to catch up because beem isn't async and blocks + await asyncio.sleep(3 * 25) + + iri_found = False + + async for stream_iri in get_iri_from_blockchain(current_block - 5): + if stream_iri == iri: + iri_found = True + break + + del settings_manager + assert iri_found + + +@pytest.mark.asyncio +async def test_startup_failures(): + """Deliberately force failure in startup of 
cli""" + runner = CliRunner() + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + test_name = "cli_fail" + iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" + + # This will fail, bad hive account name + args = ["--livetest", "--hive-account", "_podping", "write", iri] + result = runner.invoke(app, args) + + assert result.exit_code == STARTUP_FAILED_INVALID_ACCOUNT + + args = ["--livetest", "--hive-posting-key", "not_a_valid_key", "write", iri] + result = runner.invoke(app, args) + + assert result.exit_code == STARTUP_FAILED_INVALID_POSTING_KEY_EXIT_CODE diff --git a/tests/integration/test_write_cli_multiple.py b/tests/integration/test_write_cli_multiple.py index d6f84e0..1e95ca7 100644 --- a/tests/integration/test_write_cli_multiple.py +++ b/tests/integration/test_write_cli_multiple.py @@ -19,9 +19,9 @@ @pytest.mark.asyncio -@pytest.mark.timeout(180) +@pytest.mark.timeout(600) @pytest.mark.slow -async def test_write_cli_multiple_url(): +async def test_write_cli_multiple(): runner = CliRunner() settings_manager = PodpingSettingsManager(ignore_updates=True) @@ -31,12 +31,12 @@ async def test_write_cli_multiple_url(): session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) - num_urls = randint(2, 25) + num_iris = randint(2, 25) test_name = "cli_multiple" python_version = pv() - test_urls = { + test_iris = { f"https://example.com?t={test_name}&i={i}&v={python_version}&s={session_uuid_str}" - for i in range(num_urls) + for i in range(num_iris) } default_hive_operation_id = HiveOperationId( @@ -44,25 +44,25 @@ async def test_write_cli_multiple_url(): ) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_url_from_blockchain(start_block: int): + async def get_iri_from_blockchain(start_block: int): event_listener = EventListener(client, "head", start_block=start_block) _on = sync_to_async(event_listener.on, thread_sensitive=False) async for post in _on( "custom_json", filter_by={"id": default_hive_operation_id_str} ): data = json.loads(post["op"][1]["json"]) - if "urls" in data: - for u in data["urls"]: - # Only look for URLs from current session - if u.endswith(session_uuid_str): - yield u + if "iris" in data: + for iri in data["iris"]: + # Only look for IRIs from current session + if iri.endswith(session_uuid_str): + yield iri args = [ "--livetest", "--no-sanity-check", "--ignore-config-updates", "write", - *test_urls, + *test_iris, ] current_block = client.get_dynamic_global_properties()["head_block_number"] @@ -77,12 +77,12 @@ async def get_url_from_blockchain(start_block: int): # Sleep to catch up because beem isn't async and blocks await asyncio.sleep(op_period * 30) - answer_urls = set() - async for stream_url in get_url_from_blockchain(current_block - 5): - answer_urls.add(stream_url) + answer_iris = set() + async for stream_iri in get_iri_from_blockchain(current_block - 5): + answer_iris.add(stream_iri) # If we're done, end early - if len(answer_urls) == len(test_urls): + if len(answer_iris) == len(test_iris): break - assert answer_urls == test_urls + assert answer_iris == test_iris diff --git a/tests/integration/test_write_cli_single.py b/tests/integration/test_write_cli_single.py index 1e37549..cd74526 100644 --- a/tests/integration/test_write_cli_single.py +++ b/tests/integration/test_write_cli_single.py @@ -18,9 +18,9 @@ @pytest.mark.asyncio -@pytest.mark.timeout(180) +@pytest.mark.timeout(600) @pytest.mark.slow -async def test_write_cli_single_url(): +async def test_write_cli_single(): runner = 
diff --git a/tests/integration/test_write_cli_multiple.py b/tests/integration/test_write_cli_multiple.py
index d6f84e0..1e95ca7 100644
--- a/tests/integration/test_write_cli_multiple.py
+++ b/tests/integration/test_write_cli_multiple.py
@@ -19,9 +19,9 @@


 @pytest.mark.asyncio
-@pytest.mark.timeout(180)
+@pytest.mark.timeout(600)
 @pytest.mark.slow
-async def test_write_cli_multiple_url():
+async def test_write_cli_multiple():
     runner = CliRunner()

     settings_manager = PodpingSettingsManager(ignore_updates=True)
@@ -31,12 +31,12 @@ async def test_write_cli_multiple_url():
     session_uuid = uuid.uuid4()
     session_uuid_str = str(session_uuid)

-    num_urls = randint(2, 25)
+    num_iris = randint(2, 25)
     test_name = "cli_multiple"
     python_version = pv()
-    test_urls = {
+    test_iris = {
         f"https://example.com?t={test_name}&i={i}&v={python_version}&s={session_uuid_str}"
-        for i in range(num_urls)
+        for i in range(num_iris)
     }

     default_hive_operation_id = HiveOperationId(
@@ -44,25 +44,25 @@ async def test_write_cli_multiple_url():
     )
     default_hive_operation_id_str = str(default_hive_operation_id)

-    async def get_url_from_blockchain(start_block: int):
+    async def get_iri_from_blockchain(start_block: int):
         event_listener = EventListener(client, "head", start_block=start_block)
         _on = sync_to_async(event_listener.on, thread_sensitive=False)
         async for post in _on(
             "custom_json", filter_by={"id": default_hive_operation_id_str}
         ):
             data = json.loads(post["op"][1]["json"])
-            if "urls" in data:
-                for u in data["urls"]:
-                    # Only look for URLs from current session
-                    if u.endswith(session_uuid_str):
-                        yield u
+            if "iris" in data:
+                for iri in data["iris"]:
+                    # Only look for IRIs from current session
+                    if iri.endswith(session_uuid_str):
+                        yield iri

     args = [
         "--livetest",
         "--no-sanity-check",
         "--ignore-config-updates",
         "write",
-        *test_urls,
+        *test_iris,
     ]

     current_block = client.get_dynamic_global_properties()["head_block_number"]
@@ -77,12 +77,12 @@ async def get_url_from_blockchain(start_block: int):
     # Sleep to catch up because beem isn't async and blocks
     await asyncio.sleep(op_period * 30)

-    answer_urls = set()
-    async for stream_url in get_url_from_blockchain(current_block - 5):
-        answer_urls.add(stream_url)
+    answer_iris = set()
+    async for stream_iri in get_iri_from_blockchain(current_block - 5):
+        answer_iris.add(stream_iri)

         # If we're done, end early
-        if len(answer_urls) == len(test_urls):
+        if len(answer_iris) == len(test_iris):
             break

-    assert answer_urls == test_urls
+    assert answer_iris == test_iris
diff --git a/tests/integration/test_write_cli_single.py b/tests/integration/test_write_cli_single.py
index 1e37549..cd74526 100644
--- a/tests/integration/test_write_cli_single.py
+++ b/tests/integration/test_write_cli_single.py
@@ -18,9 +18,9 @@


 @pytest.mark.asyncio
-@pytest.mark.timeout(180)
+@pytest.mark.timeout(600)
 @pytest.mark.slow
-async def test_write_cli_single_url():
+async def test_write_cli_single():
     runner = CliRunner()

     settings_manager = PodpingSettingsManager(ignore_updates=True)
@@ -31,24 +31,24 @@ async def test_write_cli_single_url():
     session_uuid_str = str(session_uuid)

     test_name = "cli_single"
-    url = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}"
+    iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}"

     default_hive_operation_id = HiveOperationId(
         LIVETEST_OPERATION_ID, Medium.podcast, Reason.update
     )
     default_hive_operation_id_str = str(default_hive_operation_id)

-    async def get_url_from_blockchain(start_block: int):
+    async def get_iri_from_blockchain(start_block: int):
         event_listener = EventListener(client, "head", start_block=start_block)
         _on = sync_to_async(event_listener.on, thread_sensitive=False)
         async for post in _on(
             "custom_json", filter_by={"id": default_hive_operation_id_str}
         ):
             data = json.loads(post["op"][1]["json"])
-            if "urls" in data and len(data["urls"]) == 1:
-                yield data["urls"][0]
+            if "iris" in data and len(data["iris"]) == 1:
+                yield data["iris"][0]

-    args = ["--livetest", "--no-sanity-check", "--ignore-config-updates", "write", url]
+    args = ["--livetest", "--no-sanity-check", "--ignore-config-updates", "write", iri]

     current_block = client.get_dynamic_global_properties()["head_block_number"]

@@ -62,11 +62,11 @@ async def get_url_from_blockchain(start_block: int):
     # Sleep to catch up because beem isn't async and blocks
     await asyncio.sleep(op_period * 25)

-    url_found = False
+    iri_found = False

-    async for stream_url in get_url_from_blockchain(current_block - 5):
-        if stream_url == url:
-            url_found = True
+    async for stream_iri in get_iri_from_blockchain(current_block - 5):
+        if stream_iri == iri:
+            iri_found = True
             break

-    assert url_found
+    assert iri_found
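Both multiple-IRI tests guard against cross-talk between concurrent test runs by stamping every IRI with a per-session UUID as its final query parameter, then filtering the blockchain stream on that suffix. The idea in isolation (values are illustrative):

```
import uuid
from platform import python_version as pv
from random import randint

session_uuid_str = str(uuid.uuid4())

test_iris = {
    f"https://example.com?t=cli_multiple&i={i}&v={pv()}&s={session_uuid_str}"
    for i in range(randint(2, 25))
}

# Anything another test run posts will carry a different UUID suffix.
assert all(iri.endswith(session_uuid_str) for iri in test_iris)
```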
diff --git a/tests/integration/test_write_cli_single_simulcast.py b/tests/integration/test_write_cli_single_simulcast.py
index f5bac08..94d948a 100644
--- a/tests/integration/test_write_cli_single_simulcast.py
+++ b/tests/integration/test_write_cli_single_simulcast.py
@@ -19,10 +19,10 @@


 @pytest.mark.asyncio
-@pytest.mark.timeout(180)
+@pytest.mark.timeout(900)
 @pytest.mark.slow
 async def test_write_cli_single_simulcast():
-    """This test forces 11 separate posts to ensure we retry after exceeding the
+    """This test forces 6 separate posts to ensure we retry after exceeding the
     limit of posts per block (5)"""
     runner = CliRunner()
     start = timer()
@@ -43,35 +43,36 @@ async def _run_cli_once(_app, _args):
         result = runner.invoke(_app, _args)
         return result

-    async def get_url_from_blockchain(start_block: int):
+    async def get_iri_from_blockchain(start_block: int):
         event_listener = EventListener(client, "head", start_block=start_block)
         _on = sync_to_async(event_listener.on, thread_sensitive=False)
         async for post in _on(
             "custom_json", filter_by={"id": default_hive_operation_id_str}
         ):
             data = json.loads(post["op"][1]["json"])
-            if "urls" in data and len(data["urls"]) == 1:
-                u = data["urls"][0]
-                # Only look for URLs from current session
-                if u.endswith(session_uuid_str):
-                    yield u
+            if "iris" in data and len(data["iris"]) == 1:
+                iri = data["iris"][0]
+                # Only look for IRIs from current session
+                if iri.endswith(session_uuid_str):
+                    yield iri

     # Ensure hive env vars are set from .env.test file or this will fail

     python_version = pv()
     tasks = []
-    test_urls = {
-        f"https://example.com?t=cli_simulcast_{n}&v={python_version}&s={session_uuid_str}"
+    test_iris = {
+        f"https://example.com?t=cli_simulcast_{n}"
+        f"&v={python_version}&s={session_uuid_str}"
         for n in range(6)
     }
-    for url in test_urls:
+    for iri in test_iris:
         args = [
             "--livetest",
             "--no-sanity-check",
             "--ignore-config-updates",
             "--debug",
             "write",
-            url,
+            iri,
         ]
         tasks.append(_run_cli_once(app, args))
@@ -87,15 +88,15 @@ async def get_url_from_blockchain(start_block: int):

     # Sleep to catch up because beem isn't async and blocks
     await asyncio.sleep(op_period * 25)

-    answer_urls = set()
-    async for stream_url in get_url_from_blockchain(current_block - 5):
-        answer_urls.add(stream_url)
+    answer_iris = set()
+    async for stream_iri in get_iri_from_blockchain(current_block - 5):
+        answer_iris.add(stream_iri)

         # If we're done, end early
-        if len(answer_urls) == len(test_urls):
+        if len(answer_iris) == len(test_iris):
             break

-    assert answer_urls == test_urls
+    assert answer_iris == test_iris


 if __name__ == "__main__":
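Hive accepts at most five custom_json operations per account per block, which is what the simulcast test provokes: six invocations queued together guarantee at least one retry. A sketch of the driving pattern, assuming the collected coroutines are awaited with `asyncio.gather` (that call sits outside the hunks shown); the CLI flags are the ones the test passes:

```
import asyncio

from typer.testing import CliRunner

from podping_hivewriter.cli.podping import app

runner = CliRunner()


async def _run_cli_once(_app, _args):
    # runner.invoke() blocks the loop, but six posts fired back to back
    # still exceed what a single Hive block will accept from one account.
    return runner.invoke(_app, _args)


async def main():
    tasks = [
        _run_cli_once(
            app,
            ["--livetest", "--no-sanity-check", "--ignore-config-updates",
             "--debug", "write", f"https://example.com?t=cli_simulcast_{n}"],
        )
        for n in range(6)
    ]
    results = await asyncio.gather(*tasks)
    assert all(result.exit_code == 0 for result in results)


# asyncio.run(main())  # needs Hive credentials in the environment (.env.test)
```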
diff --git a/tests/integration/test_write_zmq_multiple.py b/tests/integration/test_write_zmq_multiple.py
index 6ca7ac1..45b64e4 100644
--- a/tests/integration/test_write_zmq_multiple.py
+++ b/tests/integration/test_write_zmq_multiple.py
@@ -21,9 +21,9 @@


 @pytest.mark.asyncio
-@pytest.mark.timeout(180)
+@pytest.mark.timeout(600)
 @pytest.mark.slow
-async def test_write_zmq_multiple_url(event_loop):
+async def test_write_zmq_multiple(event_loop):
     settings_manager = PodpingSettingsManager(ignore_updates=True)

     client = Client()
@@ -31,12 +31,12 @@ async def test_write_zmq_multiple_url(event_loop):
     session_uuid = uuid.uuid4()
     session_uuid_str = str(session_uuid)

-    num_urls = randint(2, 25)
+    num_iris = randint(2, 25)
     test_name = "zmq_multiple"
     python_version = pv()
-    test_urls = {
+    test_iris = {
         f"https://example.com?t={test_name}&i={i}&v={python_version}&s={session_uuid_str}"
-        for i in range(num_urls)
+        for i in range(num_iris)
     }

     default_hive_operation_id = HiveOperationId(
@@ -44,18 +44,18 @@ async def test_write_zmq_multiple_url(event_loop):
     )
     default_hive_operation_id_str = str(default_hive_operation_id)

-    async def get_url_from_blockchain(start_block: int):
+    async def get_iri_from_blockchain(start_block: int):
         event_listener = EventListener(client, "head", start_block=start_block)
         _on = sync_to_async(event_listener.on, thread_sensitive=False)
         async for post in _on(
             "custom_json", filter_by={"id": default_hive_operation_id_str}
         ):
             data = json.loads(post["op"][1]["json"])
-            if "urls" in data:
-                for u in data["urls"]:
-                    # Only look for URLs from current session
-                    if u.endswith(session_uuid_str):
-                        yield u
+            if "iris" in data:
+                for iri in data["iris"]:
+                    # Only look for IRIs from current session
+                    if iri.endswith(session_uuid_str):
+                        yield iri

     host = "127.0.0.1"
     port = 9979
@@ -77,27 +77,27 @@ async def get_url_from_blockchain(start_block: int):

     current_block = client.get_dynamic_global_properties()["head_block_number"]

-    for url in test_urls:
-        await socket.send_string(url)
+    for iri in test_iris:
+        await socket.send_string(iri)
         response = await socket.recv_string()

         assert response == "OK"

     # Sleep until all items in the queue are done processing
-    num_urls_processing = await podping_hivewriter.num_operations_in_queue()
-    while num_urls_processing > 0:
+    num_iris_processing = await podping_hivewriter.num_operations_in_queue()
+    while num_iris_processing > 0:
         await asyncio.sleep(op_period)
-        num_urls_processing = await podping_hivewriter.num_operations_in_queue()
+        num_iris_processing = await podping_hivewriter.num_operations_in_queue()

     # Sleep to catch up because lighthive isn't async and blocks
     await asyncio.sleep(op_period * 30)

-    answer_urls = set()
-    async for stream_url in get_url_from_blockchain(current_block - 5):
-        answer_urls.add(stream_url)
+    answer_iris = set()
+    async for stream_iri in get_iri_from_blockchain(current_block - 5):
+        answer_iris.add(stream_iri)

         # If we're done, end early
-        if len(answer_urls) == len(test_urls):
+        if len(answer_iris) == len(test_iris):
             break

-    assert answer_urls == test_urls
+    assert answer_iris == test_iris

     podping_hivewriter.close()
diff --git a/tests/integration/test_write_zmq_single.py b/tests/integration/test_write_zmq_single.py
index f3c18b9..2d30407 100644
--- a/tests/integration/test_write_zmq_single.py
+++ b/tests/integration/test_write_zmq_single.py
@@ -20,9 +20,9 @@


 @pytest.mark.asyncio
-@pytest.mark.timeout(180)
+@pytest.mark.timeout(600)
 @pytest.mark.slow
-async def test_write_zmq_single_url(event_loop):
+async def test_write_zmq_single(event_loop):
     settings_manager = PodpingSettingsManager(ignore_updates=True)

     client = Client()
@@ -31,22 +31,22 @@ async def test_write_zmq_single_url(event_loop):
     session_uuid_str = str(session_uuid)

     test_name = "zmq_single"
-    url = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}"
+    iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}"

     default_hive_operation_id = HiveOperationId(
         LIVETEST_OPERATION_ID, Medium.podcast, Reason.update
     )
     default_hive_operation_id_str = str(default_hive_operation_id)

-    async def get_url_from_blockchain(start_block: int):
+    async def get_iri_from_blockchain(start_block: int):
         event_listener = EventListener(client, "head", start_block=start_block)
         _on = sync_to_async(event_listener.on, thread_sensitive=False)
         async for post in _on(
             "custom_json", filter_by={"id": default_hive_operation_id_str}
         ):
             data = json.loads(post["op"][1]["json"])
-            if "urls" in data and len(data["urls"]) == 1:
-                yield data["urls"][0]
+            if "iris" in data and len(data["iris"]) == 1:
+                yield data["iris"][0]

     host = "127.0.0.1"
     port = 9979
@@ -68,7 +68,7 @@ async def get_url_from_blockchain(start_block: int):

     current_block = client.get_dynamic_global_properties()["head_block_number"]

-    await socket.send_string(url)
+    await socket.send_string(iri)
     response = await socket.recv_string()

     assert response == "OK"

@@ -76,12 +76,12 @@ async def get_url_from_blockchain(start_block: int):
     # Sleep to catch up because lighthive isn't async and blocks
     await asyncio.sleep(op_period * 25)

-    url_found = False
+    iri_found = False

-    async for stream_url in get_url_from_blockchain(current_block - 5):
-        if stream_url == url:
-            url_found = True
+    async for stream_iri in get_iri_from_blockchain(current_block - 5):
+        if stream_iri == iri:
+            iri_found = True
             break

-    assert url_found
+    assert iri_found

     podping_hivewriter.close()
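The ZMQ variants exercise the server mode shown in CLI.md: each IRI goes over the wire as a plain string and the writer answers "OK" once it has queued the operation. A client-side sketch, assuming pyzmq's asyncio bindings and a REQ socket (the strict send/receive lockstep in the tests implies REQ/REP, but the socket setup itself is outside the hunks shown):

```
import asyncio

import zmq
import zmq.asyncio


async def send_iri(iri: str, host: str = "127.0.0.1", port: int = 9979) -> bool:
    """Send one IRI to a running podping server and report the acknowledgement."""
    context = zmq.asyncio.Context()
    socket = context.socket(zmq.REQ)
    socket.connect(f"tcp://{host}:{port}")
    await socket.send_string(iri)
    response = await socket.recv_string()
    return response == "OK"


# asyncio.run(send_iri("https://example.com/feed.xml"))
```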