diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml
index 6dafbb6..0ac7062 100644
--- a/.github/workflows/docker_publish.yml
+++ b/.github/workflows/docker_publish.yml
@@ -44,7 +44,6 @@ jobs:
           type=semver,pattern={{major}}.{{minor}}
 
       - name: Login to DockerHub
-        if: github.event_name != 'pull_request'
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -55,7 +54,7 @@ jobs:
         with:
           context: .
           platforms: linux/amd64
-          push: ${{ github.event_name != 'pull_request' }}
+          push: true
          tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
           cache-from: type=gha
diff --git a/.github/workflows/pypi_publish.yml b/.github/workflows/pypi_publish.yml
index 8c545d2..0d87d48 100644
--- a/.github/workflows/pypi_publish.yml
+++ b/.github/workflows/pypi_publish.yml
@@ -9,26 +9,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Build and publish to pypi (3.10)
-        uses: JRubics/poetry-publish@v1.12
+      - name: Build and publish to pypi
+        uses: JRubics/poetry-publish@v1.15
         with:
-          python_version: "3.10.4"
+          python_version: "3.11.0"
           ignore_dev_requirements: "yes"
           pypi_token: ${{ secrets.PYPI_TOKEN }}
-          extra_build_dependency_packages: "capnproto libzmq3-dev"
-      - name: Build and publish to pypi (3.9)
-        uses: JRubics/poetry-publish@v1.12
-        with:
-          python_version: "3.9.13"
-          build_format: wheel
-          ignore_dev_requirements: "yes"
-          pypi_token: ${{ secrets.PYPI_TOKEN }}
-          extra_build_dependency_packages: "capnproto libzmq3-dev"
-      - name: Build and publish to pypi (3.8)
-        uses: JRubics/poetry-publish@v1.12
-        with:
-          python_version: "3.8.13"
-          build_format: wheel
-          ignore_dev_requirements: "yes"
-          pypi_token: ${{ secrets.PYPI_TOKEN }}
-          extra_build_dependency_packages: "capnproto libzmq3-dev"
\ No newline at end of file
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index 6e3180f..041c649 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -29,6 +29,30 @@ jobs:
     strategy:
       matrix:
         python-version: [3.8, 3.9, "3.10"]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          sudo apt install capnproto
+          python -m pip install --upgrade pip
+          pip install poetry
+          poetry config virtualenvs.create false
+          poetry install --no-interaction --no-ansi
+      - name: Test with pytest
+        env:
+          PODPING_HIVE_ACCOUNT: ${{ secrets.PODPING_HIVE_ACCOUNT }}
+          PODPING_HIVE_POSTING_KEY: ${{ secrets.PODPING_HIVE_POSTING_KEY }}
+        run: |
+          pytest
+  test-runslow:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [ "3.11" ]
     steps:
       - uses: actions/checkout@v2
       - name: Set up Python ${{ matrix.python-version }}
diff --git a/CLI.md b/CLI.md
index 541911a..a4f9785 100644
--- a/CLI.md
+++ b/CLI.md
@@ -8,7 +8,7 @@ $ podping [OPTIONS] COMMAND [ARGS]...
 
 **Options**:
 
-* `--medium TEXT`: The medium of the feed being updated. If used in combination with the 'server', this sets the default medium only. Must be one of the following: blog music audiobook newsletter video film podcast  [env var: PODPING_MEDIUM; default: podcast]
+* `--medium TEXT`: The medium of the feed being updated. If used in combination with the 'server', this sets the default medium only. Must be one of the following: mixed podcast podcastL music musicL video videoL film filmL audiobook audiobookL newsletter newsletterL blog blogL  [env var: PODPING_MEDIUM; default: podcast]
 * `--reason TEXT`: The reason the feed is being updated. If used in combination with the 'server', this sets the default reason only. Must be one of the following: update live liveEnd  [env var: PODPING_REASON; default: update]
 * `--hive-account TEXT`: Hive account used to post  [env var: PODPING_HIVE_ACCOUNT, HIVE_ACCOUNT, HIVE_SERVER_ACCOUNT; required]
 * `--hive-posting-key TEXT`: Hive account used to post  [env var: PODPING_HIVE_POSTING_KEY, HIVE_POSTING_KEY; required]
diff --git a/Dockerfile b/Dockerfile
index f51fb92..5140a68 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,33 +1,4 @@
-FROM docker.io/python:3.10-bullseye AS compile
-
-ENV PYTHONFAULTHANDLER=1 \
-    PYTHONHASHSEED=random \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1 \
-    PIP_DEFAULT_TIMEOUT=100 \
-    PIP_DISABLE_PIP_VERSION_CHECK=1 \
-    PIP_NO_CACHE_DIR=1 \
-    PATH="/home/podping/.local/bin:${PATH}"
-
-RUN useradd --create-home podping && mkdir /home/podping/app && chown -R podping:podping /home/podping
-
-RUN apt-get update \
-    && apt-get -y upgrade \
-    # rustc, cargo for armhf "cryptography"
-    # libzmq3-dev for armhf "pyzmq"
-    && apt-get -y install --no-install-recommends capnproto cargo libzmq3-dev rustc build-essential libssl-dev libffi-dev
-
-USER podping
-WORKDIR /home/podping/app
-
-COPY pyproject.toml poetry.lock ./
-
-RUN pip install --upgrade pip \
-    && pip install --user poetry \
-    && poetry config virtualenvs.in-project true \
-    && poetry install --no-root --only main --no-interaction --no-ansi
-
-FROM docker.io/python:3.10-slim-bullseye AS app
+FROM docker.io/python:3.11-slim-bullseye
 
 ENV PYTHONFAULTHANDLER=1 \
     PYTHONHASHSEED=random \
@@ -37,23 +8,20 @@ ENV PYTHONFAULTHANDLER=1 \
     PIP_DISABLE_PIP_VERSION_CHECK=1 \
     PIP_NO_CACHE_DIR=1
 
-RUN useradd --create-home podping && mkdir /home/podping/app && chown -R podping:podping /home/podping
+RUN useradd --create-home podping && mkdir /home/podping/app && mkdir /home/podping/.config && chown -R podping:podping /home/podping
 
 COPY install-packages.sh .
 RUN ./install-packages.sh
 
-COPY --from=compile --chown=podping:podping /home/podping/.local /home/podping/.local
-COPY --from=compile --chown=podping:podping /home/podping/app/.venv /home/podping/app/.venv
 
 USER podping
 WORKDIR /home/podping/app
 
 # poetry command installs here from pip
 ENV PATH="/home/podping/.local/bin:${PATH}"
 
 COPY --chown=podping:podping . .
-RUN pip install --upgrade pip \
-    && pip install poetry \
+RUN pip install --user poetry \
     && poetry config virtualenvs.in-project true \
-    && poetry install --only main --no-interaction --no-ansi
+    && poetry install --no-interaction --no-ansi --only main
 
 # podping command installs here
 ENV PATH="/home/podping/app/.venv/bin:${PATH}"
diff --git a/build.py b/build.py
deleted file mode 100644
index 7a07f82..0000000
--- a/build.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from capnpy.compiler.distutils import capnpy_schemas
-
-
-class SetupKwargsProxy:
-    def __init__(self, d):
-        self._d = d
-
-    @property
-    def capnpy_options(self):
-        return {
-            "convert_case": False,  # do NOT convert camelCase to camel_case
-        }
-
-    @property
-    def ext_modules(self):
-        try:
-            return self._d["ext_modules"]
-        except KeyError:
-            return None
-
-    @ext_modules.setter
-    def ext_modules(self, v):
-        self._d["ext_modules"] = v
-
-
-schema_files = [
-    "src/podping_hivewriter/schema/medium.capnp",
-    "src/podping_hivewriter/schema/reason.capnp",
-]
-
-
-def build(setup_kwargs):
-    capnpy_schemas(SetupKwargsProxy(setup_kwargs), "capnpy_schemas", schema_files)
diff --git a/examples/memory_profile/long_running_zmq.py b/examples/memory_profile/long_running_zmq.py
index 7d6e31f..ef71b3a 100644
--- a/examples/memory_profile/long_running_zmq.py
+++ b/examples/memory_profile/long_running_zmq.py
@@ -2,24 +2,36 @@
 import linecache
 import logging
 import os
+import random
+import uuid
+from ipaddress import IPv4Address
+
+from plexo.ganglion.tcp_pair import GanglionZmqTcpPair
+from plexo.plexus import Plexus
+from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import (
+    PodpingHiveTransaction,
+)
+from podping_schemas.org.podcastindex.podping.podping_medium import PodpingMedium
+from podping_schemas.org.podcastindex.podping.podping_reason import PodpingReason
+from podping_schemas.org.podcastindex.podping.podping_write import PodpingWrite
+
+from podping_hivewriter.models.medium import mediums
+from podping_hivewriter.models.reason import reasons
+from podping_hivewriter.neuron import (
+    podping_hive_transaction_neuron,
+    podping_write_neuron,
+)
 
 try:
     import tracemalloc
 except ModuleNotFoundError:
     tracemalloc = False
-import uuid
-from random import randint
-from platform import python_version as pv, python_implementation as pi
 from timeit import default_timer as timer
 
-import zmq
-import zmq.asyncio
-
-from podping_hivewriter.constants import LIVETEST_OPERATION_ID
-from podping_hivewriter.models.medium import Medium
-from podping_hivewriter.models.reason import Reason
-from podping_hivewriter.podping_hivewriter import PodpingHivewriter
-from podping_hivewriter.podping_settings_manager import PodpingSettingsManager
+host = "127.0.0.1"
+port = 9979
+metrics = {"iris_sent": 0, "ops_received": 0, "iris_received": 0, "txs_received": 0}
+txs_received_lock = asyncio.Lock()
 
 
 def display_top(snapshot, key_type="lineno", limit=3):
@@ -51,34 +63,65 @@ def display_top(snapshot, key_type="lineno", limit=3):
     logging.info("Total allocated size: %.1f KiB" % (total / 1024))
 
 
-async def endless_send_loop(event_loop):
-    context = zmq.asyncio.Context()
-    socket = context.socket(zmq.REQ, io_loop=event_loop)
-    socket.connect(f"tcp://{host}:{port}")
+async def podping_hive_transaction_reaction(transaction: PodpingHiveTransaction, _, _2):
+    num_iris = sum(len(podping.iris) for podping in transaction.podpings)
+
+    async with txs_received_lock:
+        metrics["ops_received"] = metrics["ops_received"] + len(transaction.podpings)
metrics["iris_received"] = metrics["iris_received"] + num_iris + metrics["txs_received"] = metrics["txs_received"] + 1 + + +async def endless_send_loop(): + tcp_pair_ganglion = GanglionZmqTcpPair( + peer=(IPv4Address(host), port), + relevant_neurons=( + podping_hive_transaction_neuron, + podping_write_neuron, + ), + ) + plexus = Plexus(ganglia=(tcp_pair_ganglion,)) + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) - test_name = "long_running_zmq" - python_version = pv() - python_implementation = pi() start_time = timer() + diag_time = timer() while True: + loop_start = timer() session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) - num_iris = randint(1, 10) + for i in range(1000): + iri = f"https://example.com?t=agates_test&i={i}&s={session_uuid_str}" + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) - for i in range(num_iris): - await socket.send_string( - f"https://example.com?t={test_name}&i={i}&v={python_version}&pi={python_implementation}&s={session_uuid_str}" - ) - response = await socket.recv_string() - assert response == "OK" + await plexus.transmit(podping_write) + + metrics["iris_sent"] = metrics["iris_sent"] + 1000 - if tracemalloc and (timer() - start_time) >= 60: + await asyncio.sleep(3 - (timer() - loop_start)) + if tracemalloc and (timer() - diag_time) >= 60: snapshot = tracemalloc.take_snapshot() display_top(snapshot) - start_time = timer() - await asyncio.sleep(3) + diag_time = timer() + logging.info( + f"IRIs sent: {metrics['iris_sent']} - {metrics['iris_sent'] / (diag_time - start_time)}s" + ) + logging.info( + f"TXs received: {metrics['txs_received']} - {metrics['txs_received'] / (diag_time - start_time)}s" + ) + logging.info( + f"OPs received: {metrics['ops_received']} - {metrics['ops_received'] / (diag_time - start_time)}s" + ) + logging.info( + f"IRIs received: {metrics['iris_received']} - {metrics['iris_received'] / (diag_time - start_time)}s" + ) if __name__ == "__main__": @@ -86,22 +129,5 @@ async def endless_send_loop(event_loop): tracemalloc.start() loop = asyncio.get_event_loop() logging.getLogger().setLevel(level=logging.INFO) - settings_manager = PodpingSettingsManager() - - host = "127.0.0.1" - port = 9979 - podping_hivewriter = PodpingHivewriter( - os.environ["PODPING_HIVE_ACCOUNT"], - [os.environ["PODPING_HIVE_POSTING_KEY"]], - settings_manager, - medium=Medium.podcast, - reason=Reason.update, - listen_ip=host, - listen_port=port, - resource_test=True, - operation_id=LIVETEST_OPERATION_ID, - ) - loop.run_until_complete(podping_hivewriter.wait_startup()) - loop.run_until_complete(endless_send_loop(loop)) - podping_hivewriter.close() + loop.run_until_complete(endless_send_loop()) diff --git a/install-packages.sh b/install-packages.sh index b6f2460..4098fa1 100755 --- a/install-packages.sh +++ b/install-packages.sh @@ -17,7 +17,7 @@ apt-get update apt-get -y upgrade # Install application dependencies -apt-get -y install --no-install-recommends capnproto libffi7 libssl1.1 libzmq5 zlib1g gcc libstdc++-10-dev +apt-get -y install --no-install-recommends capnproto libffi7 zlib1g # Delete cached files we don't need anymore (note that if you're # using official Docker images for Debian or Ubuntu, this happens diff --git a/poetry.lock b/poetry.lock index f155c7b..e83d5a9 100644 --- a/poetry.lock +++ 
+++ b/poetry.lock
@@ -30,7 +30,7 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
 name = "attrs"
 version = "22.1.0"
 description = "Classes Without Boilerplate"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.5"
 
@@ -102,22 +102,22 @@ optional = false
 python-versions = "~=3.7"
 
 [[package]]
-name = "capnpy"
-version = "0.9.0"
+name = "capnpy-agates"
+version = "0.9.2rc1"
 description = ""
 category = "main"
 optional = false
 python-versions = "*"
 
 [package.dependencies]
-cython = ">=0.25"
+cython = ">=0.29.30"
 docopt = "*"
 pypytools = ">=0.3.3"
 six = "*"
 
 [[package]]
 name = "certifi"
-version = "2022.9.24"
+version = "2022.12.7"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
@@ -220,7 +220,7 @@ gmpy2 = ["gmpy2"]
 
 [[package]]
 name = "exceptiongroup"
-version = "1.0.0"
+version = "1.0.4"
 description = "Backport of PEP 654 (exception groups)"
 category = "dev"
 optional = false
@@ -244,11 +244,11 @@ pyflakes = ">=2.5.0,<2.6.0"
 
 [[package]]
 name = "gitdb"
-version = "4.0.9"
+version = "4.0.10"
 description = "Git Object Database"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
 smmap = ">=3.0.1,<6"
@@ -266,24 +266,24 @@ gitdb = ">=4.0.1,<5"
 
 [[package]]
 name = "h11"
-version = "0.12.0"
+version = "0.14.0"
 description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [[package]]
 name = "httpcore"
-version = "0.15.0"
+version = "0.16.2"
 description = "A minimal low-level HTTP client."
 category = "main"
 optional = false
 python-versions = ">=3.7"
 
 [package.dependencies]
-anyio = ">=3.0.0,<4.0.0"
+anyio = ">=3.0,<5.0"
 certifi = "*"
-h11 = ">=0.11,<0.13"
+h11 = ">=0.13,<0.15"
 sniffio = ">=1.0.0,<2.0.0"
 
 [package.extras]
@@ -292,7 +292,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]
 
 [[package]]
 name = "httpx"
-version = "0.23.0"
+version = "0.23.1"
 description = "The next generation HTTP client."
category = "main" optional = false @@ -300,7 +300,7 @@ python-versions = ">=3.7" [package.dependencies] certifi = "*" -httpcore = ">=0.15.0,<0.16.0" +httpcore = ">=0.15.0,<0.17.0" rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" @@ -318,6 +318,45 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "importlib-metadata" +version = "5.1.0" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "importlib-resources" +version = "5.10.1" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "iniconfig" version = "1.1.1" @@ -340,6 +379,24 @@ pipfile-deprecated-finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + [[package]] name = "lighthive" version = "0.4.0" @@ -359,6 +416,20 @@ requests = "*" [package.extras] dev = ["requests_mock"] +[[package]] +name = "markdown" +version = "3.4.1" +description = "Python implementation of Markdown." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +testing = ["coverage", "pyyaml"] + [[package]] name = "mccabe" version = "0.7.0" @@ -395,14 +466,11 @@ python-versions = "*" [[package]] name = "packaging" -version = "21.3" +version = "22.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "patch-env" @@ -417,7 +485,7 @@ dev = ["check-manifest"] [[package]] name = "pathspec" -version = "0.10.1" +version = "0.10.2" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false @@ -431,17 +499,41 @@ category = "dev" optional = false python-versions = ">=2.6" +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.6.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] +test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "plexo" +version = "1.0.0a8" +description = "Opinionated, reactive, schema-driven, distributed message passing" +category = "main" +optional = false +python-versions = ">=3.8,<3.12" + +[package.dependencies] +capnpy-agates = ">=0.9.0,<0.10.0" +pyrsistent = ">=0.19.0,<0.20.0" +python-jsonschema-objects = ">=0.4.1,<0.5.0" +pyzmq = ">=24.0.0,<25.0.0" +returns = ">=0.19.0,<0.20.0" +typing_extensions = ">=4.0,<5.0" [[package]] name = "pluggy" @@ -455,6 +547,17 @@ python-versions = ">=3.6" dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "podping-schemas" +version = "0.1.0a17" +description = "" +category = "main" +optional = false +python-versions = ">=3.8,<3.12" + +[package.dependencies] +capnpy-agates = ">=0.9.0,<0.10.0" + [[package]] name = "py" version = "1.11.0" @@ -502,17 +605,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pypytools" version = "0.6.2" @@ -524,6 +616,14 @@ python-versions = "*" [package.dependencies] py = "*" +[[package]] +name = "pyrsistent" +version = "0.19.2" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "pytest" version = "7.2.0" @@ -546,7 +646,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. 
 [[package]]
 name = "pytest-asyncio"
-version = "0.20.1"
+version = "0.20.3"
 description = "Pytest support for asyncio"
 category = "dev"
 optional = false
@@ -556,6 +656,7 @@ python-versions = ">=3.7"
 pytest = ">=6.1.0"
 
 [package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
 testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
 
 [[package]]
@@ -609,6 +710,20 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 [package.dependencies]
 six = ">=1.5"
 
+[[package]]
+name = "python-jsonschema-objects"
+version = "0.4.1"
+description = "An object wrapper for JSON Schema definitions"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+inflection = ">=0.2"
+jsonschema = ">=2.3"
+Markdown = ">=2.4"
+six = ">=1.5.2"
+
 [[package]]
 name = "pytz"
 version = "2022.6"
@@ -655,6 +770,17 @@ urllib3 = ">=1.21.1,<1.27"
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
 use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
+[[package]]
+name = "returns"
+version = "0.19.0"
+description = "Make your functions return something meaningful, typed, and safe!"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+
+[package.dependencies]
+typing-extensions = ">=4.0,<5.0"
+
 [[package]]
 name = "rfc3986"
 version = "1.5.0"
@@ -719,7 +845,7 @@ python-versions = ">=3.7"
 
 [[package]]
 name = "stevedore"
-version = "4.1.0"
+version = "4.1.1"
 description = "Manage dynamic plugins for Python applications"
 category = "dev"
 optional = false
@@ -778,24 +904,33 @@ python-versions = ">=3.7"
 
 [[package]]
 name = "urllib3"
-version = "1.26.12"
+version = "1.26.13"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
 
 [package.extras]
 brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
 secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
 socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 
-[extras]
-server = ["pyzmq"]
+[[package]]
+name = "zipp"
+version = "3.11.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
 
 [metadata]
 lock-version = "1.1"
-python-versions = "^3.8"
-content-hash = "fb0f9e8cb9dc90f6c9ad3e01b4e261e03384d2f58c1fd0cf8a4abe64aeb7211d"
+python-versions = "^3.8,<3.12"
+content-hash = "597c949c033eca416d694e033721534836120623738c9b3fb81d504b629e84a3"
 
 [metadata.files]
 anyio = [
@@ -826,17 +961,16 @@ cachetools = [
     {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"},
     {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"},
 ]
-capnpy = [
"sha256:cecae85a51930a46a0caa2a986bf28965ae54a1123b304ec7793ce0241fe7e21"}, - {file = "capnpy-0.9.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed997a9df9509abdd259123d51d72b38b61ebb55688c364907eb93d4bc66aaa6"}, - {file = "capnpy-0.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:15c54d2ef636a060f0fcd343be4f3c49a9d4ff6eb6a4bcc90e5ff416fe64907a"}, - {file = "capnpy-0.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6b6634b0b48a15a3788888b6e5f86f738e15b397f5b429a5452d3eeaca56e64f"}, - {file = "capnpy-0.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fbddb466fe1b54871acdf9ec596d78be01b4ae6c85d7d83c494a912bc9f61f65"}, - {file = "capnpy-0.9.0.tar.gz", hash = "sha256:127b089a2df2c7b9da6f4b63574626a92c63a0fd01c26dc018de6de617297dc7"}, +capnpy-agates = [ + {file = "capnpy_agates-0.9.2rc1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92e6033dacc53a545ad584d49bff3da9b47ef0c819d11caa976f97120ae13016"}, + {file = "capnpy_agates-0.9.2rc1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a8cb7c6990276fbf0d08e890a411bb578660ad8825b4aae6529d92e303f608d"}, + {file = "capnpy_agates-0.9.2rc1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf316155c58959f52475e9b2ff26cd38fb026c88117f9beb37f310f696bfedba"}, + {file = "capnpy_agates-0.9.2rc1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6797afc06032cd5969f5aa39597bc0a7d486274a0a157913aa0df2527139a334"}, + {file = "capnpy_agates-0.9.2rc1-py3-none-any.whl", hash = "sha256:c67bcbc615a6dd9fef546083414aa76ed1515657ac0a593a898daaea116dc2a2"}, ] certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] cffi = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, @@ -1022,37 +1156,49 @@ ecdsa = [ {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0-py3-none-any.whl", hash = "sha256:2ac84b496be68464a2da60da518af3785fff8b7ec0d090a581604bc870bdee41"}, - {file = "exceptiongroup-1.0.0.tar.gz", hash = "sha256:affbabf13fb6e98988c38d9c5650e701569fe3c1de3233cfb61c5f33774690ad"}, + {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, + {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, ] flake8 = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = 
"sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] gitpython = [ {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] h11 = [ - {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, - {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] httpcore = [ - {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, - {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, + {file = "httpcore-0.16.2-py3-none-any.whl", hash = "sha256:52c79095197178856724541e845f2db86d5f1527640d9254b5b8f6f6cebfdee6"}, + {file = "httpcore-0.16.2.tar.gz", hash = "sha256:c35c5176dc82db732acfd90b581a3062c999a72305df30c0fc8fafd8e4aca068"}, ] httpx = [ - {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, - {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, + {file = "httpx-0.23.1-py3-none-any.whl", hash = "sha256:0b9b1f0ee18b9978d637b0776bfd7f54e2ca278e063e3586d8f01cda89e042a8"}, + {file = "httpx-0.23.1.tar.gz", hash = "sha256:202ae15319be24efe9a8bd4ed4360e68fde7b38bcc2ce87088d416f026667d19"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +importlib-metadata = [ + {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, + {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, +] +importlib-resources = [ + {file = "importlib_resources-5.10.1-py3-none-any.whl", hash = "sha256:c09b067d82e72c66f4f8eb12332f5efbebc9b007c0b6c40818108c9870adc363"}, + {file = "importlib_resources-5.10.1.tar.gz", hash = "sha256:32bb095bda29741f6ef0e5278c42df98d135391bee5f932841efc0041f748dc3"}, +] +inflection = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1061,9 +1207,17 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] +jsonschema = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] lighthive = [ {file = "lighthive-0.4.0.tar.gz", hash = "sha256:2043a0728605459292fe13c526c32eaa85860e56338277193cb6979dc5cc7d93"}, ] +markdown = [ + {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, + {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, +] mccabe = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -1099,29 +1253,49 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, + {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, ] patch-env = [ {file = "patch_env-1.0.0-py2.py3-none-any.whl", hash = "sha256:f12d54f8451f1347d6040b3167d29e1c60cdbc56dcf5282c28cc7e24f76393b7"}, {file = "patch_env-1.0.0.tar.gz", hash = "sha256:7c9bf612a3ee78eb9f0f7ab2d1c62a85626987ef8c2441c5fb053005f199d4d4"}, ] pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, + {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, + {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, ] pbr = [ {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, ] +pkgutil-resolve-name = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, + {file = 
"platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"}, + {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"}, +] +plexo = [ + {file = "plexo-1.0.0a8-cp310-cp310-manylinux_2_17_x86_64.whl", hash = "sha256:b36ff05bd7f9a93017e52d70a8481aecc80ddb90a6c7bc8a47767e3d9ffd403d"}, + {file = "plexo-1.0.0a8-cp311-cp311-manylinux_2_17_x86_64.whl", hash = "sha256:f8abbac74d91ca3e53f8ddecb1abd6ade6464ec1d7abfefa3234da83f0bd32f3"}, + {file = "plexo-1.0.0a8-cp38-cp38-manylinux_2_17_x86_64.whl", hash = "sha256:0dbfee97fc71571e89da2928cee9780e29bdfe8bba1068e76a7274fc78aaa010"}, + {file = "plexo-1.0.0a8-cp39-cp39-manylinux_2_17_x86_64.whl", hash = "sha256:c32f4d4d65d2d609c2b752caab9e920526c5d226d9aeb14cb2e17ac08f62adf2"}, + {file = "plexo-1.0.0a8-pp39-pypy39_pp73-manylinux_2_17_x86_64.whl", hash = "sha256:45cdbc3725eb83f58b1901c363157345fa844d0cad94bcb07e8ff41b97c08f6c"}, + {file = "plexo-1.0.0a8.tar.gz", hash = "sha256:31206e80fac2cf29ecf9ab47aa46da2a33603bdb879463f5f6f2745375600fe7"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +podping-schemas = [ + {file = "podping_schemas-0.1.0a17-cp310-cp310-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d781c461702aa9c94eb87395f6fa08996294a2e7030fe5f35e598260a87801c4"}, + {file = "podping_schemas-0.1.0a17-cp311-cp311-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b606f249a03804debe82a1070006065ac9715e4a37b49a9074719afc15b5062"}, + {file = "podping_schemas-0.1.0a17-cp38-cp38-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82fa9f6fc7c163beb8db4802d4272e2ccc14eb33ac825e7c42ee4700c22e78fe"}, + {file = "podping_schemas-0.1.0a17-cp39-cp39-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73bd9f49a42f425370ada0a80e3115b778ee8d8b93a5c453a9860480df10fd89"}, + {file = "podping_schemas-0.1.0a17-py3-none-any.whl", hash = "sha256:869152422c31f8e97f1dd295dd577f27d185e4e17a0b33371530665ae0c9eb61"}, + {file = "podping_schemas-0.1.0a17.tar.gz", hash = "sha256:875ce2926b26514224a9fbf0255f4bc699cf1a307cb9ae20fb9e575d79940eeb"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -1176,20 +1350,40 @@ pyflakes = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] pypytools = [ {file = "pypytools-0.6.2.tar.gz", hash = "sha256:a140c053e4d1c0bae835f40662eb0040a75190771acf2b2a8832dfa7beefe529"}, ] +pyrsistent = [ + {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, + {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, + {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, + {file = "pyrsistent-0.19.2-cp310-cp310-win32.whl", hash = "sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41"}, + {file = "pyrsistent-0.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425"}, + {file = "pyrsistent-0.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712"}, + {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78"}, + {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6"}, + {file = "pyrsistent-0.19.2-cp37-cp37m-win32.whl", hash = "sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2"}, + {file = "pyrsistent-0.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73"}, + {file = "pyrsistent-0.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308"}, + {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584"}, + {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb"}, + {file = "pyrsistent-0.19.2-cp38-cp38-win32.whl", hash = "sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a"}, + {file = "pyrsistent-0.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab"}, + {file = "pyrsistent-0.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"}, + {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95"}, + {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e"}, + {file = "pyrsistent-0.19.2-cp39-cp39-win32.whl", hash = "sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b"}, + {file = "pyrsistent-0.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291"}, + {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, + {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, +] pytest = [ {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, {file = "pytest-7.2.0.tar.gz", hash = 
"sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, - {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, + {file = "pytest-asyncio-0.20.3.tar.gz", hash = "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, + {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, ] pytest-cov = [ {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, @@ -1207,6 +1401,10 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] +python-jsonschema-objects = [ + {file = "python_jsonschema_objects-0.4.1-py2.py3-none-any.whl", hash = "sha256:c070cc21083054c9d33905fe1be6fe8b032ff277e0518a29bbee3bf56696033c"}, + {file = "python_jsonschema_objects-0.4.1.tar.gz", hash = "sha256:1d01286ad6f39c6e2b9df4f61a4260e65a7c61cce8394ee03412ecc0d47ef623"}, +] pytz = [ {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, @@ -1333,6 +1531,10 @@ requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] +returns = [ + {file = "returns-0.19.0-py3-none-any.whl", hash = "sha256:ae3ce9e5165d1218905291b4d5881b4e8a86ca478437bef3b0af1de8df57ec69"}, + {file = "returns-0.19.0.tar.gz", hash = "sha256:4544bb67849c1ef1bbf7823759d433a773959e5b77a8fd06d01fef6d060f2ac5"}, +] rfc3986 = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, @@ -1362,8 +1564,8 @@ sniffio = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] stevedore = [ - {file = "stevedore-4.1.0-py3-none-any.whl", hash = "sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"}, - {file = "stevedore-4.1.0.tar.gz", hash = "sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6"}, + {file = "stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"}, + {file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"}, ] tomli = [ {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, @@ -1382,6 +1584,10 @@ typing-extensions = [ {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = 
"urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, +] +zipp = [ + {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, + {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, ] diff --git a/pyproject.toml b/pyproject.toml index 16fb356..74bec8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "podping-hivewriter" -version = "1.2.10" +version = "2.0.0" license = "MIT" authors = ["Alecks Gates ", "Brian of London "] maintainers = ["Alecks Gates ", "Brian of London "] @@ -17,22 +17,22 @@ classifiers = [ "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Python Modules" ] -build = "build.py" -include = ["src/podping_hivewriter/schema/*.py", "src/podping_hivewriter/schema/*.so"] [tool.poetry.dependencies] -python = "^3.8" -pyzmq = "^24.0.1" +python = "^3.8,<3.12" cffi = "^1.14.5" pydantic = "^1.9.0" single-source = "^0.3.0" rfc3987 = "^1.3.8" asgiref = "^3.5" typer = {extras = ["all"], version = "^0.3.2"} -capnpy = "^0.9.0" +capnpy-agates = { version = "^0.9.0", allow-prereleases = true } lighthive = "^0.4.0" +plexo = {version = "1.0.0a8", allow-prereleases = true} +podping-schemas = {version = "^0.1.0a17", allow-prereleases = true} -[tool.poetry.dev-dependencies] + +[tool.poetry.group.dev.dependencies] black = "^21.5b2" isort = "^5.8.0" pytest = "^7.2.0" @@ -46,19 +46,16 @@ mypy = "^0.982" patch-env = "^1.0.0" pytest-mock = "^3.8.2" -[tool.poetry.extras] -server = ["pyzmq"] - [tool.poetry.scripts] podping = "podping_hivewriter.cli.podping:app" [build-system] -requires = ["setuptools", "poetry-core>=1.0.0", "capnpy"] +requires = ["setuptools", "poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.black] -target-version = ['py38', 'py39', 'py310'] +target-version = ['py38', 'py39', 'py310', 'py311'] [tool.isort] profile = "black" diff --git a/setup.py b/setup.py deleted file mode 100644 index 79254bf..0000000 --- a/setup.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -from setuptools import setup - -package_dir = {"": "src"} - -packages = [ - "podping_hivewriter", - "podping_hivewriter.cli", - "podping_hivewriter.models", - "podping_hivewriter.schema", -] - -package_data = {"": ["*"]} - -install_requires = [ - "asgiref>=3.5,<4.0", - "capnpy>=0.9.0,<0.10.0", - "cffi>=1.14.5,<2.0.0", - "lighthive>=0.4.0,<0.5.0", - "pydantic>=1.9.0,<2.0.0", - "rfc3987>=1.3.8,<2.0.0", - "single-source>=0.3.0,<0.4.0", - "typer[all]>=0.3.2,<0.4.0", -] - -extras_require = {"server": ["pyzmq>=24.0.1,<25.0.0"]} - -entry_points = {"console_scripts": ["podping = podping_hivewriter.cli.podping:app"]} - -setup_kwargs = { - "name": "podping-hivewriter", - "version": "1.2.10", - "description": "This is a tool used to submit RFC 3987-compliant International Resource Identifiers as a Podping notification on the Hive blockchain.", - "long_description": "# podping-hivewriter\nThe Hive writer component of Podping. 
You will need a Hive account, see section [Hive account and Authorization](#hive-account) below.\n\n## What is Podping?\n\nPodping is a mechanism of using decentralized communication to relay notification of updates of RSS feeds that use The Podcast Namespace. It does so by supplying minimum relevant metadata to consumers to be able to make efficient and actionable decisions, allowing them to decide what to do with given RSS feeds without parsing them ahead of time.\n\n*This* project provides a standardized way of posting a \"podping\" specifically to the Hive blockcahin.\n\n## Running podping-hivewriter\n\nThe project has two modes of running: `write` mode and `server` mode.\n\n`write` mode is primarily useful for people with a very small number of feeds to publish updates for relatively infrequently (i.e. a few times a day or less).\n\n`server` mode is for hosts (or other services like the Podcast Index's [podping.cloud](https://podping.cloud/)) who publish updates for a significant amount of feeds on a regular basis. Not that the average small-time podcast can't run it, but it's overkill. This mode is for efficiency only, as the `server` will batch process feeds as they come in to make the most use of the Hive blockchain.\n\nSee the dedicated [CLI docs](CLI.md) for more information on configuration options, including environment variables.\n\n### Container\n\nThe container images are hosted on [Docker Hub](https://hub.docker.com/r/podcastindexorg/podping-hivewriter). Images are currently based on Debian bullseye-based PyPy 3.8 with the following architectures: `amd64`\n\nThese images can be run in either `write` or `server` mode and is likely the easiest option for users who do not have experience installing Python packages.\n\n#### Command Line\n\nRunning in `write` mode with command line options, like `--dry-run` for example, add them with the full podping command.\nSettings can also be passed with the `-e` option for Docker. Note, we leave out `-p 9999:9999` here because we're not running the server.\n\n```shell\ndocker run --rm \\\n -e PODPING_HIVE_ACCOUNT= \\\n -e PODPING_HIVE_POSTING_KEY= \\\n docker.io/podcastindexorg/podping-hivewriter \\\n --dry-run write https://www.example.com/feed.xml\n```\n\nRun in `server` mode, passing local port 9999 to port 9999 in the container.\nENV variables can be passed to docker with `--env-file` option after modifying the `.env.EXAMPLE` file and renaming it to `.env`\n\n```shell\ndocker run --rm -p 9999:9999 --env-file .env --name podping docker.io/podcastindexorg/podping-hivewriter\n```\n\nAs another example for running in `server` mode, to run in *detached* mode, note the `-d` in the `docker run` options. 
Also note that `write` or `server` must come *after* the command line options for `podping`:\n```shell\ndocker run --rm -d \\\n -p 9999:9999 --env-file .env \\\n --name podping \\\n docker.io/podcastindexorg/podping-hivewriter \\\n --livetest server\n```\n\nOne running you can view and follow the live output with:\n```shell\ndocker logs podping -f\n```\n\nSee the [CLI docs](https://github.com/Podcastindex-org/podping-hivewriter/blob/main/CLI.md) for default values.\n\n\n#### docker-compose\n\n```yaml\nversion: '2.0'\nservices:\n podping-hivewriter:\n image: docker.io/podcastindexorg/podping-hivewriter\n restart: always\n ports:\n - \"9999:9999\"\n environment:\n - PODPING_HIVE_ACCOUNT=\n - PODPING_HIVE_POSTING_KEY=\n - PODPING_LISTEN_IP=0.0.0.0\n - PODPING_LISTEN_PORT=9999\n - PODPING_LIVETEST=false\n - PODPING_DRY_RUN=false\n - PODPING_STATUS=true\n - PODPING_IGNORE_CONFIG_UPDATES=false\n - PODPING_I_KNOW_WHAT_IM_DOING=false\n - PODPING_DEBUG=false\n```\n\nAssuming you just copy-pasted without reading, the above will fail at first. As noted in the [server command documentation](https://github.com/Podcastindex-org/podping-hivewriter/blob/main/CLI.md#podping-server):\n\n>WARNING: DO NOT run this on a publicly accessible host. There currently is NO authentication required to submit to the server. Set to * or 0.0.0.0 for all interfaces.\n\nAs all Docker installations vary, we set `0.0.0.0` as the listen IP for connectivity. This doesn't affect the IP address docker listens on when we tell it to pass port `9999` through to the container. If you understand the consequences of this, set `PODPING_I_KNOW_WHAT_IM_DOING` to `true`.\n\nThis is a temporary measure to limit potential misconfiguration until we fully bundle the `podping.cloud` HTTP front end. Then again, if you're running this, you're probably Dave.\n\n\n### CLI Install\n\nThe following have been tested on Linux and macOS. However, Windows should work also. If you have issues on Windows we highly recommend the [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/) and/or Docker.\n\n#### Using [pipx](https://pypa.github.io/pipx/) (preferred over pip)\n```shell\npipx install podping-hivewriter\n```\n\n#### Using pip\n```shell\npip install --user podping-hivewriter\n```\n\n#### Installing the server\n\nIf you'd like to install the server component, it's hidden behind the extra flag `server`. 
This is to make it easier to install only the `write` CLI component `podping-hivewriter` on non-standard systems without a configured development enviornment.\n\n```shell\npipx install podping-hivewriter[server]\n```\n\nMake sure you have `~/.local/bin/` on your `PATH`.\n\nSee the dedicated [CLI docs](CLI.md) for more information.\n\n## Podping reasons\n\nPodping accepts various different \"reasons\" for publishing updates to RSS feeds:\n\n* `update` -- A general indication that an RSS feed has been updated\n* `live` -- An indication that an RSS feed has been updated and a contained [``](https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#live-item) tag's status attribute has been changed to live.\n* `liveEnd` -- An indication that an RSS feed has been updated and either the status attribute of an existing [``](https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#live-item) has been changed from live to ended or a [``](https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#live-item) that previously had a status attribute of live has been removed from the feed entirely.\n\nThe canonical list of reasons within the scope of this project is [maintained in this schema](https://github.com/Podcastindex-org/podping-hivewriter/blob/main/src/podping_hivewriter/schema/reason.capnp).\n\n## Mediums\n\nPodping accepts various different \"mediums\" for identifying types of RSS feeds using the Podcast Namespace. Please check the [``](https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#medium) specification for the full list.\n\n`podping-hivewriter` *may* lag behind the specification, and if it does, please let us know or submit a pull request.\n\nThe canonical list of mediums within the scope of this project is [maintained in this schema](https://github.com/Podcastindex-org/podping-hivewriter/blob/main/src/podping_hivewriter/schema/medium.capnp).\n\n## Development\n\nYou'll need a few extras:\n\n1. [capnproto](https://capnproto.org/). Linux: `capnproto` package in your package manager. On a Mac: `brew instal capnp`\n2. [Poetry](https://python-poetry.org/docs/)\n\n\nWe use [poetry](https://python-poetry.org/) for dependency management. Once you have it, clone this repo and run:\n\n```shell\npoetry install\n```\n\nThen to switch to the virtual environment, use:\n\n```shell\npoetry shell\n```\nMake sure you have a the environment variables `PODPING_HIVE_ACCOUNT` and `PODPING_HIVE_POSTING_KEY` set.\n\nAfter that you should be able to run the `podping` command or run the tests:\n\n```shell\npytest\n```\n\nTo run all tests, make sure to set the necessary environment variables for your Hive account. This will take many minutes:\n\n```shell\npytest --runslow\n```\n\n### Building the image locally with Docker\n\nLocally build the podping-hivewriter container with a \"develop\" tag\n\n```shell\ndocker build -t podping-hivewriter:develop .\n```\n\nSee above for more details on running the docker CLI.\n\n## Hive account\n\nIf you need a Hive account, please download the [Hive Keychain extension for your browser](https://hive-keychain.com/) then use this link to get your account from [https://HiveOnboard.com?ref=podping](https://hiveonboard.com?ref=podping). You will need at least 20 Hive Power \"powered up\" to get started (worth around $10). 
\n\nAfter that you should be able to run the `podping` command or run the tests:\n\n```shell\npytest\n```\n\nTo run all tests, make sure to set the necessary environment variables for your Hive account. This will take many minutes:\n\n```shell\npytest --runslow\n```\n\n### Building the image locally with Docker\n\nLocally build the podping-hivewriter container with a \"develop\" tag:\n\n```shell\ndocker build -t podping-hivewriter:develop .\n```\n\nSee above for more details on running the Docker CLI.\n\n## Hive account\n\nIf you need a Hive account, please download the [Hive Keychain extension for your browser](https://hive-keychain.com/) then use this link to get your account from [https://HiveOnboard.com?ref=podping](https://hiveonboard.com?ref=podping). You will need at least 20 Hive Power \"powered up\" to get started (worth around $10). Please contact [@brianoflondon](https://peakd.com/@brianoflondon) at brian@podping.org if you need assistance getting set up.\n\nIf you use the [Hiveonboard](https://hiveonboard.com?ref=podping) link, `podping` will **delegate** enough Hive Power to get you started. If, for any reason, Hiveonboard is not giving out free accounts, please contact [@brianoflondon](https://peakd.com/@brianoflondon) either on [PodcastIndex Social](https://podcastindex.social/invite/U2m6FY3T) or [Telegram](https://t.me/brianoflondon).\n\n### Permissions and Authorization\n\nYou don't need permission, but you do need to tell `podping` that you want to send valid `podpings`:\n\n- Hive is a so-called \"permissionless\" blockchain. Once you have a Hive Account and a minimal amount of Hive Power, that account can post to Hive, including sending `podpings`.\n- Nobody can block any valid Hive Account from sending, and nobody can help you if you lose your keys.\n- Whilst anyone can post `podpings` to Hive, there is a need to register your Hive Account name for those `podpings` to be recognized by all clients. This is merely a spam-prevention measure, and clients may choose to ignore it.\n- Please contact new@podping.org or send a Hive Transfer to [@podping](https://peakd.com/@podping) to have your account validated.\n- Side note on keys: `podping` uses the `posting` key, which is the lowest-value of the four Hive keys (`owner`, `active`, `memo`, `posting`; there is usually also a `master password` which can generate all the keys). That is not to say that losing control of it is a good idea, but that key is not authorized to make financially important transfers. It can, however, post public information, so it should be treated carefully and kept secure.\n\nFor a [comprehensive explanation of Hive and Podping, please see this post](https://peakd.com/podping/@brianoflondon/podping-and-podcasting-20-funding-to-put-hive-at-the-center-of-global-podcasting-infrastructure).", - "author": "Alecks Gates", - "author_email": "alecks@podping.org", - "maintainer": "Alecks Gates", - "maintainer_email": "alecks@podping.org", - "url": "http://podping.org/", - "package_dir": package_dir, - "packages": packages, - "package_data": package_data, - "install_requires": install_requires, - "extras_require": extras_require, - "entry_points": entry_points, - "python_requires": ">=3.8,<4.0", -} -from build import * -

-build(setup_kwargs) -

-setup(**setup_kwargs) diff --git a/src/podping_hivewriter/async_context.py b/src/podping_hivewriter/async_context.py index 3c60b87..cf74d6b 100644 --- a/src/podping_hivewriter/async_context.py +++ b/src/podping_hivewriter/async_context.py @@ -22,5 +22,22 @@ def close(self): except RuntimeError: pass + if self._tasks: + wait_coro = asyncio.wait( + self._tasks, timeout=3, return_when=asyncio.ALL_COMPLETED + ) + try: + loop = asyncio.get_running_loop() + + future = asyncio.run_coroutine_threadsafe(wait_coro, loop) + # This is broken, pending https://bugs.python.org/issue42130 + # future.result(3) + except RuntimeError: + asyncio.run(wait_coro) + except TimeoutError: + pass + finally: + self._tasks = [] + def _add_task(self, task): self._tasks.append(task) diff --git a/src/podping_hivewriter/cli/podping.py b/src/podping_hivewriter/cli/podping.py index d413011..fb2850e 100644 --- a/src/podping_hivewriter/cli/podping.py +++ b/src/podping_hivewriter/cli/podping.py @@ -7,6 +7,12 @@ import typer from lighthive.broadcast.base58 import Base58 from lighthive.client import Client +from
podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from podping_hivewriter import __version__ from podping_hivewriter.constants import ( @@ -16,8 +22,8 @@ EXIT_CODE_INVALID_POSTING_KEY, ) from podping_hivewriter.hive import get_client -from podping_hivewriter.models.medium import Medium, mediums, str_medium_map -from podping_hivewriter.models.reason import Reason, reasons, str_reason_map +from podping_hivewriter.models.medium import medium_strings, str_medium_map +from podping_hivewriter.models.reason import reason_strings, str_reason_map from podping_hivewriter.podping_hivewriter import PodpingHivewriter from podping_hivewriter.podping_settings_manager import PodpingSettingsManager @@ -32,17 +38,17 @@ def is_base58(sb: str) -> bool: def medium_callback(medium: str) -> str: - if medium not in mediums: + if medium not in medium_strings: raise typer.BadParameter( - f"Medium be one of the following: {str(', '.join(mediums))}" + f"Medium must be one of the following: {str(', '.join(sorted(medium_strings)))}" ) return medium def reason_callback(reason: str) -> str: - if reason not in reasons: + if reason not in reason_strings: raise typer.BadParameter( - f"Reason must be one of the following: {str(', '.join(reasons))}" + f"Reason must be one of the following: {str(', '.join(sorted(reason_strings)))}" ) return reason @@ -71,8 +77,8 @@ def version_callback(value: bool): class Config: hive_account: str hive_posting_key: str - medium: Medium - reason: Reason + medium: PodpingMedium + reason: PodpingReason sanity_check: bool livetest: bool dry_run: bool @@ -146,12 +152,17 @@ def write( reason=Config.reason, operation_id=Config.operation_id, resource_test=Config.sanity_check, - daemon=False, + zmq_service=False, dry_run=Config.dry_run, ) as podping_hivewriter: - coro = podping_hivewriter.failure_retry( - set(iris), medium=Config.medium, reason=Config.reason - ) + + async def write_and_log(): + failure_count, response = await podping_hivewriter.broadcast_iris_retry( + set(iris), medium=Config.medium, reason=Config.reason + ) + logging.info(f"Transaction sent: {response.hive_tx_id}") + + coro = write_and_log() try: # Try to get an existing loop in case of running from other program # Mostly used for pytest @@ -246,7 +257,7 @@ def server( operation_id=Config.operation_id, resource_test=Config.sanity_check, dry_run=Config.dry_run, - daemon=True, + zmq_service=True, status=Config.status, client=Config.lighthive_client, ) @@ -254,7 +265,7 @@ def server( try: # Try to get an existing loop in case of running from other program # Mostly used for pytest - loop = asyncio.get_running_loop() + asyncio.get_running_loop() except RuntimeError as _: # If the loop isn't running, RuntimeError is raised. Run normally loop = asyncio.get_event_loop() @@ -266,20 +277,20 @@ def server( @app.callback() def callback( medium: str = typer.Option( - str(Medium.podcast), + str(PodpingMedium.podcast), envvar=["PODPING_MEDIUM"], callback=medium_callback, - autocompletion=lambda: list(mediums), + autocompletion=lambda: sorted(medium_strings), help=f"The medium of the feed being updated. If used in combination with the 'server', this sets the default " - f"medium only. Must be one of the following: {str(' '.join(mediums))}", + f"medium only. 
Must be one of the following: {str(' '.join(medium_strings))}", ), reason: str = typer.Option( - str(Reason.update), + str(PodpingReason.update), envvar=["PODPING_REASON"], callback=reason_callback, - autocompletion=lambda: list(reasons), + autocompletion=lambda: sorted(reason_strings), help=f"The reason the feed is being updated. If used in combination with the 'server', this sets the default " - f"reason only. Must be one of the following: {str(' '.join(reasons))}", + f"reason only. Must be one of the following: {str(' '.join(reason_strings))}", ), hive_account: str = typer.Option( ..., diff --git a/src/podping_hivewriter/schema/__init__.py b/src/podping_hivewriter/codec/__init__.py similarity index 100% rename from src/podping_hivewriter/schema/__init__.py rename to src/podping_hivewriter/codec/__init__.py diff --git a/src/podping_hivewriter/codec/podping_codec.py b/src/podping_hivewriter/codec/podping_codec.py new file mode 100644 index 0000000..806da79 --- /dev/null +++ b/src/podping_hivewriter/codec/podping_codec.py @@ -0,0 +1,15 @@ +from plexo.codec.capnpy_codec import CapnpyCodec + +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) +from podping_schemas.org.podcastindex.podping.podping_write_error import ( + PodpingWriteError, +) + +podping_hive_transaction_codec = CapnpyCodec(PodpingHiveTransaction) +podping_write_codec = CapnpyCodec(PodpingWrite) +podping_write_error_codec = CapnpyCodec(PodpingWriteError) diff --git a/src/podping_hivewriter/hive.py b/src/podping_hivewriter/hive.py index a61bbc4..5389e06 100644 --- a/src/podping_hivewriter/hive.py +++ b/src/podping_hivewriter/hive.py @@ -1,5 +1,6 @@ import asyncio import itertools +import json import logging import os from random import shuffle @@ -9,13 +10,19 @@ import backoff from lighthive.client import Client from lighthive.exceptions import RPCNodeException +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping import Podping from podping_hivewriter.async_wrapper import sync_to_async +from podping_hivewriter.models.internal_podping import CURRENT_PODPING_VERSION +from podping_hivewriter.models.medium import str_medium_map +from podping_hivewriter.models.reason import str_reason_map def get_client( posting_keys: Optional[List[str]] = None, - nodes=None, connect_timeout=3, read_timeout=30, loglevel=logging.ERROR, @@ -73,6 +80,8 @@ def get_allowed_accounts( try: master_account = client.account(account_name) return set(master_account.following()) + except KeyError: + logging.warning(f"Unable to get account followers - retrying") except Exception as e: logging.warning(f"Unable to get account followers: {e} - retrying") client.circuit_breaker_cache[client.current_node] = True @@ -84,8 +93,8 @@ def get_allowed_accounts( client.next_node() -async def listen_for_custom_json_operations( - condenser_api_client: Client, start_block: int +async def get_relevant_transactions_from_blockchain( + condenser_api_client: Client, start_block: int, operation_id: str = None ): current_block = start_block if not current_block: @@ -105,24 +114,48 @@ async def listen_for_custom_json_operations( ] while (head_block - current_block) > 0: try: - block = await async_get_block({"block_num": current_block}) - for op in ( - (trx_id, op) - for trx_id, transaction in enumerate( - 
block["block"]["transactions"] - ) - for op in transaction["operations"] - ): - if op[1]["type"] == "custom_json_operation": - yield { - "block": current_block, - "timestamp": block["block"]["timestamp"], - "trx_id": op[0], - "op": [ - "custom_json", - op[1]["value"], - ], - } + while True: + try: + block = await async_get_block({"block_num": current_block}) + for tx_num, transaction in enumerate( + block["block"]["transactions"] + ): + tx_id = block["block"]["transaction_ids"][tx_num] + podpings = [] + for op in transaction["operations"]: + if op["type"] == "custom_json_operation" and ( + not operation_id + or op["value"]["id"] == operation_id + ): + data = json.loads(op["value"]["json"]) + if ( + "iris" in data + and "version" in data + and data["version"] + == CURRENT_PODPING_VERSION + ): + podpings.append( + Podping( + medium=str_medium_map[ + data["medium"] + ], + reason=str_reason_map[ + data["reason"] + ], + iris=data["iris"], + timestampNs=data["timestampNs"], + sessionId=data["sessionId"], + ) + ) + if len(podpings): + yield PodpingHiveTransaction( + podpings=podpings, + hiveTxId=tx_id, + hiveBlockNum=current_block, + ) + break + except KeyError as e: + pass current_block += 1 head_block = (await async_get_dynamic_global_properties())[ "head_block_number" diff --git a/src/podping_hivewriter/models/hive_operation_id.py b/src/podping_hivewriter/models/hive_operation_id.py index 786ca03..8cb7792 100644 --- a/src/podping_hivewriter/models/hive_operation_id.py +++ b/src/podping_hivewriter/models/hive_operation_id.py @@ -1,17 +1,21 @@ -from podping_hivewriter.models.medium import Medium -from podping_hivewriter.models.reason import Reason +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) class HiveOperationId: def __init__( self, podping: str, - medium: Medium = Medium.podcast, - reason: Reason = Reason.update, + medium: PodpingMedium = PodpingMedium.podcast, + reason: PodpingReason = PodpingReason.update, ): self.podping: str = podping - self.medium: Medium = medium - self.reason: Reason = reason + self.medium: PodpingMedium = medium + self.reason: PodpingReason = reason def __eq__(self, other): return str(self) == str(other) diff --git a/src/podping_hivewriter/models/podping.py b/src/podping_hivewriter/models/internal_podping.py similarity index 66% rename from src/podping_hivewriter/models/podping.py rename to src/podping_hivewriter/models/internal_podping.py index e631aa3..cdf8c4b 100644 --- a/src/podping_hivewriter/models/podping.py +++ b/src/podping_hivewriter/models/internal_podping.py @@ -2,30 +2,35 @@ from pydantic import BaseModel, validator -from podping_hivewriter.models.medium import mediums -from podping_hivewriter.models.reason import reasons +from podping_hivewriter.models.medium import medium_strings +from podping_hivewriter.models.reason import reason_strings -class Podping(BaseModel): +CURRENT_PODPING_VERSION = "1.1" + + +class InternalPodping(BaseModel): """Dataclass for on-chain podping schema""" - version: Literal["1.0"] = "1.0" + version: Literal[CURRENT_PODPING_VERSION] = CURRENT_PODPING_VERSION medium: str reason: str iris: List[str] + timestampNs: int + sessionId: int @validator("medium") def medium_exists(cls, v): """Make sure the given medium matches what's available""" - if v not in mediums: - raise ValueError(f"medium must be one of {str(', '.join(mediums))}") + if v not in medium_strings: + raise ValueError(f"medium must be 
one of {str(', '.join(medium_strings))}") return v @validator("reason") def reason_exists(cls, v): """Make sure the given reason matches what's available""" - if v not in reasons: - raise ValueError(f"reason must be one of {str(', '.join(reasons))}") + if v not in reason_strings: + raise ValueError(f"reason must be one of {str(', '.join(reason_strings))}") return v @validator("iris") diff --git a/src/podping_hivewriter/models/iri_batch.py b/src/podping_hivewriter/models/iri_batch.py index 664ed5e..1b0e2ec 100644 --- a/src/podping_hivewriter/models/iri_batch.py +++ b/src/podping_hivewriter/models/iri_batch.py @@ -1,13 +1,21 @@ import uuid from typing import Set +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from pydantic import BaseModel, validator class IRIBatch(BaseModel): - batch_id: uuid.UUID + medium: PodpingMedium + reason: PodpingReason iri_set: Set[str] + priority: int + timestampNs: int - @validator("batch_id", pre=True, always=True) - def default_batch_id(cls, v: uuid.UUID) -> uuid.UUID: - return v or uuid.uuid4() + def __lt__(self, other): + return self.priority <= other.priority diff --git a/src/podping_hivewriter/models/lighthive_broadcast_response.py b/src/podping_hivewriter/models/lighthive_broadcast_response.py new file mode 100644 index 0000000..cadb9d4 --- /dev/null +++ b/src/podping_hivewriter/models/lighthive_broadcast_response.py @@ -0,0 +1,30 @@ +from operator import itemgetter + + +class LighthiveBroadcastResponse: + def __init__( + self, + response_dict: dict, + ): + self.hive_tx_id: str + self.hive_block_num: int + self.hive_tx_num: int + self.expired: bool + ( + self.hive_tx_id, + self.hive_block_num, + self.hive_tx_num, + self.expired, + ) = itemgetter("id", "block_num", "trx_num", "expired")(response_dict) + + def __eq__(self, other): + return str(self) == str(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(str(self)) + + def __str__(self): + return self.hive_tx_id diff --git a/src/podping_hivewriter/models/medium.py b/src/podping_hivewriter/models/medium.py index 497b675..d2a7150 100644 --- a/src/podping_hivewriter/models/medium.py +++ b/src/podping_hivewriter/models/medium.py @@ -1,22 +1,26 @@ -import capnpy -from capnpy.annotate import Options +from typing import FrozenSet -medium_module = capnpy.load_schema( - "podping_hivewriter.schema.medium", - # Make sure properties are imported as specified (camelCase) - options=Options(convert_case=False, include_reflection_data=True), +import capnpy +from podping_schemas.org.podcastindex.podping import podping_medium +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, ) -Medium = medium_module.Medium - -mediums = frozenset(Medium.__members__) - # capnpy has a different "constructor" for pyx vs pure python -get_medium_by_num = Medium._new_hack if hasattr(Medium, "_new_hack") else Medium._new +get_medium_by_num = ( + PodpingMedium._new_hack + if hasattr(PodpingMedium, "_new_hack") + else PodpingMedium._new +) str_medium_map = { enumerant.name.decode("UTF-8"): get_medium_by_num(enumerant.codeOrder) - for enumerant in capnpy.get_reflection_data(medium_module) - .get_node(Medium) + for enumerant in capnpy.get_reflection_data(podping_medium) + .get_node(PodpingMedium) .get_enum_enumerants() } + +medium_strings: FrozenSet[str] = frozenset(PodpingMedium.__members__) +mediums: FrozenSet[PodpingMedium] = 
frozenset( + {str_medium_map[medium] for medium in medium_strings} +) diff --git a/src/podping_hivewriter/models/podping_hive_operation.py b/src/podping_hivewriter/models/podping_hive_operation.py deleted file mode 100644 index c9a19c0..0000000 --- a/src/podping_hivewriter/models/podping_hive_operation.py +++ /dev/null @@ -1,13 +0,0 @@ -import uuid -from typing import Set - -from pydantic import BaseModel, validator - - -class PodpingHiveOperation(BaseModel): - batch_id: uuid.UUID - iri_set: Set[str] - - @validator("batch_id", pre=True, always=True) - def default_batch_id(cls, v: uuid.UUID) -> uuid.UUID: - return v or uuid.uuid4() diff --git a/src/podping_hivewriter/models/reason.py b/src/podping_hivewriter/models/reason.py index 6ac2d9f..e6493c8 100644 --- a/src/podping_hivewriter/models/reason.py +++ b/src/podping_hivewriter/models/reason.py @@ -1,22 +1,26 @@ -import capnpy -from capnpy.annotate import Options +from typing import FrozenSet -reason_module = capnpy.load_schema( - "podping_hivewriter.schema.reason", - # Make sure properties are imported as specified (camelCase) - options=Options(convert_case=False, include_reflection_data=True), +import capnpy +from podping_schemas.org.podcastindex.podping import podping_reason +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, ) -Reason = reason_module.Reason - -reasons = frozenset(Reason.__members__) - # capnpy has a different "constructor" for pyx vs pure python -get_reason_by_num = Reason._new_hack if hasattr(Reason, "_new_hack") else Reason._new +get_reason_by_num = ( + PodpingReason._new_hack + if hasattr(PodpingReason, "_new_hack") + else PodpingReason._new +) str_reason_map = { enumerant.name.decode("UTF-8"): get_reason_by_num(enumerant.codeOrder) - for enumerant in capnpy.get_reflection_data(reason_module) - .get_node(Reason) + for enumerant in capnpy.get_reflection_data(podping_reason) + .get_node(PodpingReason) .get_enum_enumerants() } + +reason_strings: FrozenSet[str] = frozenset(PodpingReason.__members__) +reasons: FrozenSet[PodpingReason] = frozenset( + {str_reason_map[reason] for reason in reason_strings} +) diff --git a/src/podping_hivewriter/namespace.py b/src/podping_hivewriter/namespace.py new file mode 100644 index 0000000..df6749a --- /dev/null +++ b/src/podping_hivewriter/namespace.py @@ -0,0 +1,5 @@ +from plexo.namespace import Namespace + +podping_hivewriter_namespace = Namespace( + ["org", "podcastindex", "podping", "hivewriter"] +) diff --git a/src/podping_hivewriter/neuron.py b/src/podping_hivewriter/neuron.py new file mode 100644 index 0000000..2c1af06 --- /dev/null +++ b/src/podping_hivewriter/neuron.py @@ -0,0 +1,31 @@ +from plexo.neuron.neuron import Neuron + +from podping_hivewriter.codec.podping_codec import ( + podping_write_codec, + podping_write_error_codec, + podping_hive_transaction_codec, +) +from podping_hivewriter.namespace import podping_hivewriter_namespace +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) +from podping_schemas.org.podcastindex.podping.podping_write_error import ( + PodpingWriteError, +) + +podping_hive_transaction_neuron = Neuron( + PodpingHiveTransaction, + podping_hivewriter_namespace, + podping_hive_transaction_codec, +) +podping_write_neuron = Neuron( + PodpingWrite, podping_hivewriter_namespace, podping_write_codec +) +podping_write_error_neuron = Neuron( + PodpingWriteError, + 
podping_hivewriter_namespace, + podping_write_error_codec, +) diff --git a/src/podping_hivewriter/podping_hivewriter.py b/src/podping_hivewriter/podping_hivewriter.py index c6c2d0f..6357602 100644 --- a/src/podping_hivewriter/podping_hivewriter.py +++ b/src/podping_hivewriter/podping_hivewriter.py @@ -5,14 +5,24 @@ import re import sys import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone +from functools import partial from timeit import default_timer as timer -from typing import List, Optional, Set, Tuple, Union +from typing import List, Optional, Set, Tuple, Union, Dict, Iterable import rfc3987 from lighthive.client import Client from lighthive.datastructures import Operation from lighthive.exceptions import RPCNodeException +from plexo.ganglion.tcp_pair import GanglionZmqTcpPair +from plexo.plexus import Plexus +from podping_schemas.org.podcastindex.podping.podping import Podping +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from podping_hivewriter import __version__ as podping_hivewriter_version from podping_hivewriter.async_context import AsyncContext @@ -34,10 +44,41 @@ from podping_hivewriter.hive import get_client from podping_hivewriter.models.hive_operation_id import HiveOperationId from podping_hivewriter.models.iri_batch import IRIBatch -from podping_hivewriter.models.medium import Medium -from podping_hivewriter.models.podping import Podping -from podping_hivewriter.models.reason import Reason +from podping_hivewriter.models.lighthive_broadcast_response import ( + LighthiveBroadcastResponse, +) +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.internal_podping import InternalPodping +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, + podping_write_error_neuron, +) from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) +from podping_schemas.org.podcastindex.podping.podping_write_error import ( + PodpingWriteError, + PodpingWriteErrorType, +) + + +logger = logging.getLogger(__name__) +logger.addHandler(logging.NullHandler()) + + +def current_timestamp() -> float: + # returns floating point timestamp in seconds + return datetime.utcnow().replace(tzinfo=timezone.utc).timestamp() + + +def current_timestamp_nanoseconds() -> float: + return current_timestamp() * 1e9 class PodpingHivewriter(AsyncContext): @@ -46,16 +87,17 @@ def __init__( server_account: str, posting_keys: List[str], settings_manager: PodpingSettingsManager, - medium: Medium = Medium.podcast, - reason: Reason = Reason.update, + medium: PodpingMedium = PodpingMedium.podcast, + reason: PodpingReason = PodpingReason.update, listen_ip: str = "127.0.0.1", listen_port: int = 9999, operation_id="pp", resource_test=True, dry_run=False, - daemon=True, + zmq_service=True, status=True, client: Client = None, + plexus: Plexus = None, ): super().__init__() @@ -70,8 +112,12 @@ def __init__( self.operation_id: str = operation_id self.resource_test: bool = resource_test self.dry_run: bool = dry_run - self.daemon: bool = daemon + self.zmq_service: bool = zmq_service self.status: bool 
= status + self.external_plexus = True if plexus else False + self.plexus = plexus if plexus else Plexus() + + self.session_id = uuid.uuid4().int & (1 << 64) - 1 self.lighthive_client = client or get_client( posting_keys=posting_keys, @@ -79,18 +125,20 @@ def __init__( ) self._async_hive_broadcast = sync_to_async( - self.lighthive_client.broadcast, thread_sensitive=False + self.lighthive_client.broadcast_sync, thread_sensitive=False ) self.total_iris_recv = 0 self.total_iris_sent = 0 self.total_iris_recv_deduped = 0 - self._iris_in_flight = 0 - self._iris_in_flight_lock = asyncio.Lock() - - self.iri_batch_queue: "asyncio.Queue[IRIBatch]" = asyncio.Queue() - self.iri_queue: "asyncio.Queue[str]" = asyncio.Queue() + self.iri_batch_queue: "asyncio.PriorityQueue[IRIBatch]" = ( + asyncio.PriorityQueue() + ) + self.unprocessed_iri_queue: asyncio.Queue[PodpingWrite] = asyncio.Queue(1000) + self.iri_queues: Dict[ + Tuple[PodpingMedium, PodpingReason], asyncio.Queue[str] + ] = {pair: asyncio.Queue() for pair in itertools.product(mediums, reasons)} self.startup_datetime = datetime.utcnow() self.startup_time = timer() @@ -98,18 +146,66 @@ def __init__( self._startup_done = False asyncio.ensure_future(self._startup()) + def close(self): + super().close() + if not self.external_plexus: + self.plexus.close() + async def _startup(self): if self.resource_test and not self.dry_run: await self.test_hive_resources() logging.info(f"Hive account: @{self.server_account}") - if self.daemon: - self._add_task(asyncio.create_task(self._zmq_response_loop())) - self._add_task(asyncio.create_task(self._iri_batch_loop())) - self._add_task(asyncio.create_task(self._iri_batch_handler_loop())) - if self.status: - self._add_task(asyncio.create_task(self._hive_status_loop())) + await self.plexus.adapt(podping_hive_transaction_neuron) + await self.plexus.adapt( + podping_write_neuron, + reactants=( + partial( + self._podping_write_reactant, + self.plexus, + self.unprocessed_iri_queue, + ), + ), + ) + await self.plexus.adapt(podping_write_error_neuron) + + if self.zmq_service: + tcp_pair_ganglion = GanglionZmqTcpPair( + bind_interface=self.listen_ip, + port=self.listen_port, + relevant_neurons=( + podping_hive_transaction_neuron, + podping_write_neuron, + podping_write_error_neuron, + ), + ) + # tcp_pair_ganglion.socket.setsockopt(zmq.RCVHWM, 500) + await self.plexus.infuse_ganglion(tcp_pair_ganglion) + + for (medium, reason), iri_queue in self.iri_queues.items(): + self._add_task( + asyncio.create_task( + self._iri_batch_loop( + medium, reason, iri_queue, self.iri_batch_queue + ) + ) + ) + self._add_task( + asyncio.create_task(self._iri_batch_handler_loop(self.iri_batch_queue)) + ) + self._add_task( + asyncio.create_task( + self._unprocessed_iri_queue_handler( + self.settings_manager, + self.iri_batch_queue, + self.unprocessed_iri_queue, + self.iri_queues, + ) + ) + ) + if self.status: + self._add_task(asyncio.create_task(self._hive_status_loop())) self._startup_done = True @@ -127,6 +223,7 @@ async def test_hive_resources(self): "message": "Podping startup initiated", "uuid": str(uuid.uuid4()), "hive": str(self.lighthive_client.current_node), + "sessionId": self.session_id, } startup_hive_operation_id = self.operation_id + STARTUP_OPERATION_ID @@ -141,7 +238,7 @@ async def test_hive_resources(self): # Retry startup notification for every node before giving up for i in range(startup_notification_attempts_max): try: - await self.send_notification(custom_json, startup_hive_operation_id) + await self.broadcast_dict(custom_json, 
startup_hive_operation_id) break except RPCNodeException: if i == startup_notification_attempts_max - 1: @@ -195,7 +292,7 @@ async def test_hive_resources(self): sys.exit(EXIT_CODE_UNKNOWN) async def wait_startup(self): - settings = await self.settings_manager.get_settings() + settings = self.settings_manager.get_settings() while not self._startup_done: await asyncio.sleep(settings.hive_operation_period) @@ -203,78 +300,149 @@ async def _hive_status_loop(self): while True: try: await self.output_hive_status() - settings = await self.settings_manager.get_settings() + settings = self.settings_manager.get_settings() await asyncio.sleep(settings.diagnostic_report_period) except asyncio.CancelledError: raise except Exception: logging.exception("Unknown in _hive_status_loop", stack_info=True) - async def _iri_batch_handler_loop(self): + async def _iri_batch_handler_loop( + self, + iri_batch_queue: "asyncio.Queue[IRIBatch]", + ): """Opens and watches a queue and sends notifications to Hive one by one""" + + session_id = self.session_id + while True: try: - iri_batch = await self.iri_batch_queue.get() + settings = self.settings_manager.get_settings() + + start_time = timer() + num_in_batch = 0 + batches = [] + # Limited to 5 custom json operation per block + while not iri_batch_queue.empty() and num_in_batch < 5: + iri_batch = await iri_batch_queue.get() + batches.append(iri_batch) + iri_batch_queue.task_done() + logging.debug( + f"Handling Podping ({iri_batch.timestampNs}, {session_id})" + ) + num_in_batch += 1 - start = timer() - failure_count = await self.failure_retry( - iri_batch.iri_set, medium=self.medium, reason=self.reason - ) - duration = timer() - start + if len(batches) > 0: + broadcast_start_time = timer() + failure_count, response = await self.broadcast_iri_batches_retry( + batches + ) + broadcast_duration = timer() - broadcast_start_time + + podpings = [ + Podping( + medium=iri_batch.medium, + reason=iri_batch.reason, + iris=list(iri_batch.iri_set), + timestampNs=iri_batch.timestampNs, + sessionId=self.session_id, + ) + for iri_batch in batches + ] + + num_iris = sum(len(iri_batch.iri_set) for iri_batch in batches) + + last_node = self.lighthive_client.current_node + if response: + for podping in podpings: + logging.info( + f"Podping ({podping.timestampNs}, {session_id}) | " + f"Hive txid: {response.hive_tx_id}" + ) + logging.info( + f"TX send time: {broadcast_duration:0.2f} | " + f"Failures: {failure_count} | " + f"IRIs in TX: {num_iris} | " + f"Hive txid: {response.hive_tx_id} | " + f"Hive block num: {response.hive_block_num} | " + f"last_node: {last_node}" + ) - self.iri_batch_queue.task_done() - async with self._iris_in_flight_lock: - self._iris_in_flight -= len(iri_batch.iri_set) + await self.plexus.transmit( + PodpingHiveTransaction( + podpings=podpings, + hiveTxId=response.hive_tx_id, + hiveBlockNum=response.hive_block_num, + ) + ) - last_node = self.lighthive_client.current_node - logging.info( - f"Batch send time: {duration:0.2f} | " - f"Failures: {failure_count} - IRI batch_id {iri_batch.batch_id} | " - f"IRIs in batch: {len(iri_batch.iri_set)} | " - f"last_node: {last_node}" - ) + logging.debug(f"Transmitted TX: {response.hive_tx_id}") + + end_time = timer() + sleep_time = settings.hive_operation_period - (end_time - start_time) + if sleep_time > 0: + await asyncio.sleep(sleep_time) except asyncio.CancelledError: raise except Exception: - logging.exception("Unknown in _iri_batch_handler_loop", stack_info=True) + logging.exception( + "Unknown in _iri_batch_handler_loop", + 
stack_info=True, + ) raise - async def _iri_batch_loop(self): + async def _iri_batch_loop( + self, + medium: PodpingMedium, + reason: PodpingReason, + iri_queue: "asyncio.Queue[str]", + iri_batch_queue: "asyncio.PriorityQueue[IRIBatch]", + ): async def get_from_queue(): try: - return await self.iri_queue.get() + return await iri_queue.get() except RuntimeError: return - settings = await self.settings_manager.get_settings() + priority = 1 + + if reason == PodpingReason.live: + priority = -1 + elif reason == PodpingReason.liveEnd: + priority = 0 + + session_id = self.session_id while True: + settings = self.settings_manager.get_settings() + iri_set: Set[str] = set() start = timer() duration = 0 iris_size_without_commas = 0 iris_size_total = 0 - batch_id = uuid.uuid4() + podping_timestamp = int(current_timestamp_nanoseconds()) # Wait until we have enough IRIs to fit in the payload # or get into the current Hive block while ( duration < settings.hive_operation_period - and iris_size_total < settings.max_url_list_bytes - ): + or iri_batch_queue.qsize() >= 5 + ) and iris_size_total < settings.max_url_list_bytes: try: iri = await asyncio.wait_for( get_from_queue(), timeout=settings.hive_operation_period, ) iri_set.add(iri) - self.iri_queue.task_done() + iri_queue.task_done() logging.debug( - f"_iri_batch_loop - Duration: {duration:.3f} - " - f"IRI in queue: {iri} - " - f"IRI batch_id {batch_id} - " - f"Num IRIs: {len(iri_set)}" + f"_iri_batch_loop | " + f"Podping ({podping_timestamp}, {session_id}) | " + f"Medium: {medium} - Reason: {reason} | " + f"Duration: {duration:.3f} | " + f"IRI in queue: {iri}" ) # byte size of IRI in JSON is IRI + 2 quotes @@ -290,7 +458,10 @@ async def get_from_queue(): raise except Exception: logging.exception( - "Unknown error in _iri_batch_loop", stack_info=True + f"Unknown error in _iri_batch_loop | " + f"Podping ({podping_timestamp}, {session_id}) | " + f"Medium: {medium} - Reason: {reason}", + stack_info=True, ) raise finally: @@ -299,51 +470,110 @@ async def get_from_queue(): try: if len(iri_set): - iri_batch = IRIBatch(batch_id=batch_id, iri_set=iri_set) - await self.iri_batch_queue.put(iri_batch) + iri_batch = IRIBatch( + medium=medium, + reason=reason, + iri_set=iri_set, + priority=priority, + timestampNs=podping_timestamp, + ) + await iri_batch_queue.put(iri_batch) self.total_iris_recv_deduped += len(iri_set) logging.info( - f"IRI batch_id {batch_id} - Size of IRIs: {iris_size_total}" + f"Podping ({podping_timestamp}, {session_id}) | " + f"Medium: {medium} - Reason: {reason} | " + f"Size of IRIs: {iris_size_total}" ) except asyncio.CancelledError: raise except Exception: - logging.exception("Unknown error in _iri_batch_loop", stack_info=True) + logging.exception( + f"Unknown error in _iri_batch_loop | " + f"Podping ({podping_timestamp}, {session_id}) | " + f"Medium: {medium} - Reason: {reason}", + stack_info=True, + ) raise - async def _zmq_response_loop(self): - import zmq.asyncio - - context = zmq.asyncio.Context() - socket = context.socket(zmq.REP) - # TODO: Check IPv6 support - socket.bind(f"tcp://{self.listen_ip}:{self.listen_port}") - - logging.info(f"Running ZeroMQ server on {self.listen_ip}:{self.listen_port}") - + async def _unprocessed_iri_queue_handler( + self, + settings_manager: PodpingSettingsManager, + iri_batch_queue: "asyncio.PriorityQueue[IRIBatch]", + unprocessed_iri_queue: "asyncio.Queue[PodpingWrite]", + iri_queues: "Dict[Tuple[PodpingMedium, PodpingReason], asyncio.Queue[str]]", + ): while True: try: - iri: str = await 
socket.recv_string() - if rfc3987.match(iri, "IRI"): - await self.iri_queue.put(iri) - async with self._iris_in_flight_lock: - self._iris_in_flight += 1 - self.total_iris_recv += 1 - await socket.send_string("OK") - else: - await socket.send_string("Invalid IRI") + podping_write: PodpingWrite = await unprocessed_iri_queue.get() + queue = iri_queues[(podping_write.medium, podping_write.reason)] + await queue.put(podping_write.iri) + unprocessed_iri_queue.task_done() + self.total_iris_recv += 1 + + qsize = iri_batch_queue.qsize() + + if qsize >= 10: + logging.debug( + f"_unprocessed_iri_queue_handler | " + f"unprocessed_iri_queue size: {unprocessed_iri_queue.qsize()}" + ) + logging.debug( + f"_unprocessed_iri_queue_handler | " + f"iri_batch_queue size: {qsize}" + ) + op_period = settings_manager.get_settings().hive_operation_period + logging.debug( + f"_unprocessed_iri_queue_handler | " + f"Sleeping for {op_period}s" + ) + await asyncio.sleep(op_period) + except asyncio.TimeoutError: + pass except asyncio.CancelledError: - socket.close() raise except Exception: logging.exception( - "Unknown error in _zmq_response_loop", stack_info=True + f"Unknown error in _unprocessed_iri_queue_handler", + stack_info=True, ) raise + @staticmethod + async def _podping_write_reactant( + plexus: Plexus, + unprocessed_iri_queue: "asyncio.Queue[PodpingWrite]", + podping_write: PodpingWrite, + _, + _2, + ): + if rfc3987.match(podping_write.iri, "IRI"): + await unprocessed_iri_queue.put(podping_write) + else: + podping_write_error = PodpingWriteError( + podpingWrite=podping_write, + errorType=PodpingWriteErrorType.invalidIri, + ) + await plexus.transmit(podping_write_error) + + async def send_podping( + self, + iri: str, + medium: Optional[PodpingMedium], + reason: Optional[PodpingReason], + ): + podping_write = PodpingWrite( + medium=medium or self.medium, + reason=reason or self.reason, + iri=iri, + ) + + await self.plexus.transmit(podping_write) + async def num_operations_in_queue(self) -> int: - async with self._iris_in_flight_lock: - return self._iris_in_flight + return ( + sum(queue.qsize() for queue in self.iri_queues.values()) + + self.iri_batch_queue.qsize() + ) async def output_hive_status(self) -> None: """Output the name of the current hive node @@ -358,40 +588,59 @@ async def output_hive_status(self) -> None: f"last_node: {last_node}" ) + def construct_operations( + self, + payload_operation_ids: Iterable[Tuple[dict, Union[HiveOperationId, str]]], + ) -> List[Operation]: + """Build the operation for the blockchain""" + + operations: List[Operation] = [] + + for payload, hive_operation_id in payload_operation_ids: + payload_json = json.dumps(payload, separators=(",", ":")) + size_of_json = len(payload_json) + if size_of_json > HIVE_CUSTOM_OP_DATA_MAX_LENGTH: + raise PodpingCustomJsonPayloadExceeded( + "Max custom_json payload exceeded" + ) + + op = Operation( + "custom_json", + { + "required_auths": [], + "required_posting_auths": self.required_posting_auths, + "id": str(hive_operation_id), + "json": payload_json, + }, + ) + + operations.append(op) + + return operations + def construct_operation( self, payload: dict, hive_operation_id: Union[HiveOperationId, str] - ) -> Tuple[Operation, int]: - """Builed the operation for the blockchain""" - payload_json = json.dumps(payload, separators=(",", ":")) - size_of_json = len(payload_json) - if size_of_json > HIVE_CUSTOM_OP_DATA_MAX_LENGTH: - raise PodpingCustomJsonPayloadExceeded("Max custom_json payload exceeded") - - op = Operation( - "custom_json", - { - 
"required_auths": [], - "required_posting_auths": self.required_posting_auths, - "id": str(hive_operation_id), - "json": payload_json, - }, - ) - return op, size_of_json + ) -> Operation: + return self.construct_operations(((payload, hive_operation_id),))[0] - async def send_notification( - self, payload: dict, hive_operation_id: Union[HiveOperationId, str] - ) -> None: + async def broadcast_dicts( + self, + payload_operation_ids: Iterable[Tuple[dict, Union[HiveOperationId, str]]], + ) -> LighthiveBroadcastResponse: """Build and send an operation to the blockchain""" try: - op, size_of_json = self.construct_operation(payload, hive_operation_id) + ops = self.construct_operations(payload_operation_ids) # if you want to FORCE the error condition for >5 operations # in one block, uncomment this line. # op = [op] * 6 - await self._async_hive_broadcast(op=op, dry_run=self.dry_run) + broadcast_task = asyncio.create_task( + self._async_hive_broadcast(op=ops, dry_run=self.dry_run) + ) logging.info(f"Lighthive Node: {self.lighthive_client.current_node}") - logging.info(f"JSON size: {size_of_json}") + + return LighthiveBroadcastResponse(await broadcast_task) except RPCNodeException as ex: logging.error(f"send_notification error: {ex}") try: @@ -413,73 +662,118 @@ async def send_notification( raise ex except PodpingCustomJsonPayloadExceeded: raise + except KeyError: + if self.dry_run: + return LighthiveBroadcastResponse( + {"id": "0", "block_num": 0, "trx_num": 0, "expired": False} + ) + else: + logging.exception("Unknown error in send_notification", stack_info=True) + raise except Exception: logging.exception("Unknown error in send_notification", stack_info=True) raise - async def send_notification_iri( + async def broadcast_dict( + self, payload: dict, hive_operation_id: Union[HiveOperationId, str] + ) -> LighthiveBroadcastResponse: + return await self.broadcast_dicts(((payload, hive_operation_id),)) + + async def broadcast_iri_batches( + self, + iri_batches: Iterable[IRIBatch], + ) -> LighthiveBroadcastResponse: + num_iris = sum(len(iri_batch.iri_set) for iri_batch in iri_batches) + payload_operation_ids = ( + ( + InternalPodping( + medium=iri_batch.medium, + reason=iri_batch.reason, + iris=list(iri_batch.iri_set), + timestampNs=iri_batch.timestampNs, + sessionId=self.session_id, + ).dict(), + HiveOperationId(self.operation_id, iri_batch.medium, iri_batch.reason), + ) + for iri_batch in iri_batches + ) + + response = await self.broadcast_dicts(payload_operation_ids) + + self.total_iris_sent += num_iris + + return response + + async def broadcast_iri( self, iri: str, - medium: Optional[Medium], - reason: Optional[Reason], - ) -> None: - payload = Podping( - medium=medium or self.medium, reason=reason or self.reason, iris=[iri] + medium: Optional[PodpingMedium], + reason: Optional[PodpingReason], + ) -> LighthiveBroadcastResponse: + payload = InternalPodping( + medium=medium or self.medium, + reason=reason or self.reason, + iris=[iri], + timestampNs=int(current_timestamp_nanoseconds()), + sessionId=self.session_id, ) hive_operation_id = HiveOperationId(self.operation_id, medium, reason) - await self.send_notification(payload.dict(), hive_operation_id) + response = await self.broadcast_dict(payload.dict(), hive_operation_id) self.total_iris_sent += 1 - async def send_notification_iris( + return response + + async def broadcast_iris( self, - iris: Set[str], - medium: Optional[Medium], - reason: Optional[Reason], - ) -> None: - num_iris = len(iris) - payload = Podping( - medium=medium or self.medium, 
reason=reason or self.reason, iris=list(iris) - ) + iri_set: Set[str], + medium: Optional[PodpingMedium], + reason: Optional[PodpingReason], + ) -> LighthiveBroadcastResponse: + num_iris = len(iri_set) + timestamp = int(current_timestamp_nanoseconds()) + payload_dict = InternalPodping( + medium=medium or self.medium, + reason=reason or self.reason, + iris=list(iri_set), + timestampNs=timestamp, + sessionId=self.session_id, + ).dict() hive_operation_id = HiveOperationId(self.operation_id, medium, reason) - await self.send_notification(payload.dict(), hive_operation_id) + response = await self.broadcast_dict(payload_dict, hive_operation_id) self.total_iris_sent += num_iris - async def failure_retry( + return response + + async def broadcast_iri_batches_retry( self, - iri_set: Set[str], - medium: Optional[Medium], - reason: Optional[Reason], - ) -> int: + iri_batches: Iterable[IRIBatch], + ) -> Tuple[int, Optional[LighthiveBroadcastResponse]]: await self.wait_startup() failure_count = 0 for _ in itertools.repeat(None): - # Sleep a maximum of 5 minutes, 3 additional seconds for every retry + num_iris = sum(len(iri_batch.iri_set) for iri_batch in iri_batches) if failure_count > 0: logging.info( - f"FAILURE COUNT: {failure_count} - RETRYING {len(iri_set)} IRIs" + f"FAILURE COUNT: {failure_count} - RETRYING {num_iris} IRIs" ) else: - logging.info(f"Received {len(iri_set)} IRIs") + logging.info(f"Received {num_iris} IRIs") # noinspection PyBroadException try: - await self.send_notification_iris( - iris=iri_set, - medium=medium or self.medium, - reason=reason or self.reason, - ) + response = await self.broadcast_iri_batches(iri_batches=iri_batches) if failure_count > 0: logging.info(f"FAILURE CLEARED after {failure_count} retries") - return failure_count + return failure_count, response except RPCNodeException as ex: - logging.error(f"Failed to send {len(iri_set)} IRIs") + logging.error(f"Failed to send {num_iris} IRIs") try: # Test if we have a well-formed Hive error message logging.error(ex) @@ -488,7 +782,9 @@ async def failure_retry( == "tx_missing_posting_auth" ): if logging.DEBUG >= logging.root.level: - for iri in iri_set: + for iri in itertools.chain.from_iterable( + iri_batch.iri_set for iri_batch in iri_batches + ): logging.debug(iri) logging.error( f"Terminating: exit code: " @@ -516,21 +812,39 @@ async def failure_retry( except TooManyCustomJsonsPerBlock as ex: logging.warning(ex) # Wait for the next block to retry - sleep_for = ( - await self.settings_manager.get_settings() - ).hive_operation_period + sleep_for = (self.settings_manager.get_settings()).hive_operation_period logging.warning(f"Sleeping for {sleep_for}s") await asyncio.sleep(sleep_for) except Exception: logging.info(f"Current node: {self.lighthive_client.current_node}") logging.info(self.lighthive_client.nodes) logging.exception("Unknown error in failure_retry", stack_info=True) - logging.error(f"Failed to send {len(iri_set)} IRIs") + logging.error(f"Failed to send {num_iris} IRIs") if logging.DEBUG >= logging.root.level: - for iri in iri_set: + for iri in itertools.chain.from_iterable( + iri_batch.iri_set for iri_batch in iri_batches + ): logging.debug(iri) sys.exit(EXIT_CODE_UNKNOWN) finally: failure_count += 1 - return failure_count + return failure_count, None + + async def broadcast_iris_retry( + self, + iri_set: Set[str], + medium: Optional[PodpingMedium], + reason: Optional[PodpingReason], + ) -> Tuple[int, Optional[LighthiveBroadcastResponse]]: + return await self.broadcast_iri_batches_retry( + ( + IRIBatch( + 
iri_set=iri_set, + medium=medium or self.medium, + reason=reason or self.reason, + priority=0, + timestampNs=int(current_timestamp_nanoseconds()), + ), + ) + ) diff --git a/src/podping_hivewriter/podping_settings_manager.py b/src/podping_hivewriter/podping_settings_manager.py index c52924c..fc43e40 100644 --- a/src/podping_hivewriter/podping_settings_manager.py +++ b/src/podping_hivewriter/podping_settings_manager.py @@ -67,6 +67,5 @@ async def update_podping_settings(self) -> None: self.override_hive_operation_period ) - async def get_settings(self) -> PodpingSettings: - async with self._settings_lock: - return self._settings + def get_settings(self) -> PodpingSettings: + return self._settings diff --git a/src/podping_hivewriter/schema/medium.capnp b/src/podping_hivewriter/schema/medium.capnp deleted file mode 100644 index d66fd46..0000000 --- a/src/podping_hivewriter/schema/medium.capnp +++ /dev/null @@ -1,11 +0,0 @@ -@0xedda8f1fc8b626fe; - -enum Medium { - podcast @0; - music @1; - video @2; - film @3; - audiobook @4; - newsletter @5; - blog @6; -} diff --git a/src/podping_hivewriter/schema/reason.capnp b/src/podping_hivewriter/schema/reason.capnp deleted file mode 100644 index b0578bc..0000000 --- a/src/podping_hivewriter/schema/reason.capnp +++ /dev/null @@ -1,7 +0,0 @@ -@0x9accdfe4a45164eb; - -enum Reason { - update @0; - live @1; - liveEnd @2; -} diff --git a/tests/conftest.py b/tests/conftest.py index e446d0a..904c7cf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,9 @@ +import logging + import pytest +from podping_hivewriter.hive import get_client + def pytest_addoption(parser): parser.addoption( @@ -19,3 +23,8 @@ def pytest_collection_modifyitems(config, items): for item in items: if "slow" in item.keywords: item.add_marker(skip_slow) + + +@pytest.fixture(scope="session") +def lighthive_client(): + return get_client(loglevel=logging.WARN) diff --git a/tests/integration/test_get_allowed_accounts.py b/tests/integration/test_get_allowed_accounts.py index 8f98bd4..20e44d8 100644 --- a/tests/integration/test_get_allowed_accounts.py +++ b/tests/integration/test_get_allowed_accounts.py @@ -1,14 +1,11 @@ -import logging - import pytest -from podping_hivewriter.hive import get_allowed_accounts, get_client +from podping_hivewriter.hive import get_allowed_accounts @pytest.mark.asyncio @pytest.mark.skip -async def test_get_allowed_accounts(): +async def test_get_allowed_accounts(lighthive_client): # Checks the allowed accounts checkup - client = get_client(loglevel=logging.WARN) - allowed_accounts = get_allowed_accounts(client) + allowed_accounts = get_allowed_accounts(lighthive_client) assert type(allowed_accounts) == set and len(allowed_accounts) > 0 diff --git a/tests/integration/test_startup_checks_and_write_cli_single.py b/tests/integration/test_startup_checks_and_write_cli_single.py index 1b9f7ca..3143484 100644 --- a/tests/integration/test_startup_checks_and_write_cli_single.py +++ b/tests/integration/test_startup_checks_and_write_cli_single.py @@ -1,8 +1,13 @@ -import json import uuid from platform import python_version as pv import pytest +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from typer.testing import CliRunner from podping_hivewriter.cli.podping import app @@ -11,41 +16,32 @@ EXIT_CODE_INVALID_ACCOUNT, EXIT_CODE_INVALID_POSTING_KEY, ) -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from 
podping_hivewriter.hive import get_relevant_transactions_from_blockchain from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import Medium -from podping_hivewriter.models.reason import Reason @pytest.mark.asyncio @pytest.mark.timeout(600) @pytest.mark.slow -async def test_startup_checks_and_write_cli_single(): +async def test_startup_checks_and_write_cli_single(lighthive_client): runner = CliRunner() - client = get_client() - session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) test_name = "cli_single" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - default_hive_operation_id = HiveOperationId( - LIVETEST_OPERATION_ID, Medium.podcast, Reason.update - ) + medium = PodpingMedium.podcast + reason = PodpingReason.update + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data and len(data["iris"]) == 1: - yield data["iris"][0] - args = ["--livetest", "write", iri] - current_block = client.get_dynamic_global_properties()["head_block_number"] + current_block = lighthive_client.get_dynamic_global_properties()[ + "head_block_number" + ] # Ensure hive env vars are set from .env.test file or this will fail result = runner.invoke(app, args) @@ -54,9 +50,15 @@ async def get_iri_from_blockchain(start_block: int): iri_found = False - async for stream_iri in get_iri_from_blockchain(current_block): - if stream_iri == iri: + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, current_block, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason break assert iri_found diff --git a/tests/integration/test_update-podping-settings.py b/tests/integration/test_update-podping-settings.py index 1b5508a..8183f4d 100644 --- a/tests/integration/test_update-podping-settings.py +++ b/tests/integration/test_update-podping-settings.py @@ -31,7 +31,7 @@ async def test_update_podping_settings(): @pytest.mark.asyncio @pytest.mark.slow -async def test_update_podping_settings_loop(): +async def test_update_podping_settings_loop(lighthive_client): # See if we can fetch data from podping # Must not use Testnet when looking for config data @@ -42,7 +42,9 @@ async def test_update_podping_settings_loop(): "control_account_check_period" ].default = check_period - with PodpingSettingsManager(ignore_updates=False) as settings_manager: + with PodpingSettingsManager( + ignore_updates=False, client=lighthive_client + ) as settings_manager: await asyncio.sleep(3) # Check last update time diff --git a/tests/integration/test_write_cli_multiple.py b/tests/integration/test_write_cli_multiple.py index 4d7d5ed..3885aa1 100644 --- a/tests/integration/test_write_cli_multiple.py +++ b/tests/integration/test_write_cli_multiple.py @@ -1,4 +1,3 @@ -import json import random import uuid from platform import python_version as pv @@ -9,20 +8,18 @@ from podping_hivewriter.cli.podping import app from podping_hivewriter.constants import LIVETEST_OPERATION_ID -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from 
podping_hivewriter.hive import get_relevant_transactions_from_blockchain from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map +from podping_hivewriter.models.medium import medium_strings, str_medium_map +from podping_hivewriter.models.reason import reason_strings, str_reason_map @pytest.mark.asyncio @pytest.mark.timeout(600) @pytest.mark.slow -async def test_write_cli_multiple(): +async def test_write_cli_multiple(lighthive_client): runner = CliRunner() - client = get_client() - session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -34,22 +31,12 @@ async def test_write_cli_multiple(): for i in range(num_iris) } - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium = str_medium_map[random.sample(sorted(medium_strings), 1)[0]] + reason = str_reason_map[random.sample(sorted(reason_strings), 1)[0]] default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data: - for iri in data["iris"]: - # Only look for IRIs from current session - if iri.endswith(session_uuid_str): - yield iri - args = [ "--medium", str(medium), @@ -62,7 +49,9 @@ async def get_iri_from_blockchain(start_block: int): *test_iris, ] - current_block = client.get_dynamic_global_properties()["head_block_number"] + current_block = lighthive_client.get_dynamic_global_properties()[ + "head_block_number" + ] # Ensure hive env vars are set from .env.test file or this will fail result = runner.invoke(app, args) @@ -70,11 +59,18 @@ async def get_iri_from_blockchain(start_block: int): assert result.exit_code == 0 answer_iris = set() - async for stream_iri in get_iri_from_blockchain(current_block): - answer_iris.add(stream_iri) - - # If we're done, end early - if len(answer_iris) == len(test_iris): + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, current_block, default_hive_operation_id_str + ): + for podping in tx.podpings: + assert podping.medium == medium + assert podping.reason == reason + + for iri in podping.iris: + if iri.endswith(session_uuid_str): + answer_iris.add(iri) + + if len(test_iris) == len(answer_iris): break - assert answer_iris == test_iris + assert test_iris == answer_iris diff --git a/tests/integration/test_write_cli_single.py b/tests/integration/test_write_cli_single.py index 7b34bcf..26a182b 100644 --- a/tests/integration/test_write_cli_single.py +++ b/tests/integration/test_write_cli_single.py @@ -1,4 +1,3 @@ -import json import random import uuid from platform import python_version as pv @@ -8,18 +7,17 @@ from podping_hivewriter.cli.podping import app from podping_hivewriter.constants import LIVETEST_OPERATION_ID -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map +from 
podping_hivewriter.models.medium import medium_strings, str_medium_map +from podping_hivewriter.models.reason import reason_strings, str_reason_map @pytest.mark.asyncio @pytest.mark.timeout(600) @pytest.mark.slow -async def test_write_cli_single(): +async def test_write_cli_single(lighthive_client): runner = CliRunner() - client = get_client() session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -27,22 +25,12 @@ async def test_write_cli_single(): test_name = "cli_single" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium = str_medium_map[random.sample(sorted(medium_strings), 1)[0]] + reason = str_reason_map[random.sample(sorted(reason_strings), 1)[0]] default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data and len(data["iris"]) == 1: - iri = data["iris"][0] - # Only look for IRIs from current session - if iri.endswith(session_uuid_str): - yield data["iris"][0] - args = [ "--medium", str(medium), @@ -55,17 +43,26 @@ async def get_iri_from_blockchain(start_block: int): iri, ] - current_block = client.get_dynamic_global_properties()["head_block_number"] + current_block = lighthive_client.get_dynamic_global_properties()[ + "head_block_number" + ] # Ensure hive env vars are set from .env.test file or this will fail result = runner.invoke(app, args) assert result.exit_code == 0 + iri_found = False - async for stream_iri in get_iri_from_blockchain(current_block): - if stream_iri == iri: + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, current_block, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason break assert iri_found diff --git a/tests/integration/test_write_cli_single_simulcast.py b/tests/integration/test_write_cli_single_simulcast.py deleted file mode 100644 index 02806ea..0000000 --- a/tests/integration/test_write_cli_single_simulcast.py +++ /dev/null @@ -1,92 +0,0 @@ -import asyncio -import json -import random -import uuid -from platform import python_version as pv -from timeit import default_timer as timer - -import pytest -from typer.testing import CliRunner - -from podping_hivewriter.cli.podping import app -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations -from podping_hivewriter.models.medium import mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map - - -@pytest.mark.asyncio -@pytest.mark.timeout(900) -@pytest.mark.slow -async def test_write_cli_single_simulcast(): - """This test forces 7 separate posts to ensure we retry after exceeding the - limit of posts per block (5)""" - runner = CliRunner() - start = timer() - - client = get_client() - - session_uuid = uuid.uuid4() - session_uuid_str = str(session_uuid) - - async def _run_cli_once(_app, _args): - print(f"Timer: {timer()-start}") - result = runner.invoke(_app, _args) - return result - - async def get_iri_from_blockchain(start_block: int): - async for post in 
listen_for_custom_json_operations(client, start_block): - data = json.loads(post["op"][1]["json"]) - if "iris" in data and len(data["iris"]) == 1: - iri = data["iris"][0] - # Only look for IRIs from current session - if iri.endswith(session_uuid_str): - yield iri - - # Ensure hive env vars are set from .env.test file or this will fail - - python_version = pv() - tasks = [] - test_iris = { - f"https://example.com?t=cli_simulcast_{n}" - f"&v={python_version}&s={session_uuid_str}" - for n in range(7) - } - for iri in test_iris: - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] - args = [ - "--medium", - str(medium), - "--reason", - str(reason), - "--livetest", - "--no-sanity-check", - "--ignore-config-updates", - "--debug", - "write", - iri, - ] - tasks.append(_run_cli_once(app, args)) - - current_block = client.get_dynamic_global_properties()["head_block_number"] - - results = await asyncio.gather(*tasks) - - all_ok = all(r.exit_code == 0 for r in results) - assert all_ok - - answer_iris = set() - async for stream_iri in get_iri_from_blockchain(current_block): - answer_iris.add(stream_iri) - - # If we're done, end early - if len(answer_iris) == len(test_iris): - break - - assert answer_iris == test_iris - - -if __name__ == "__main__": - loop = asyncio.get_event_loop() - coro = test_write_cli_single_simulcast() - loop.run_until_complete(coro) diff --git a/tests/integration/test_write_dry_run_empty_tx.py b/tests/integration/test_write_dry_run_empty_tx.py new file mode 100644 index 0000000..bf1d970 --- /dev/null +++ b/tests/integration/test_write_dry_run_empty_tx.py @@ -0,0 +1,94 @@ +import asyncio +import os +import random +import uuid +from platform import python_version as pv + +import pytest +from plexo.plexus import Plexus +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) + +from podping_hivewriter.constants import LIVETEST_OPERATION_ID +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, +) +from podping_hivewriter.podping_hivewriter import PodpingHivewriter +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_dry_run_empty_tx(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + test_name = "dry_run_empty_tx" + iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + plexus = Plexus() + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) + + host = 
"127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + dry_run=True, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + plexus=plexus, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + + await plexus.transmit(podping_write) + + tx = await tx_queue.get() + + assert len(tx.podpings) == 1 + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + assert iri in tx.podpings[0].iris + assert tx.hiveTxId is "0" + assert tx.hiveBlockNum is 0 + + plexus.close() diff --git a/tests/integration/test_write_plexus_multiple.py b/tests/integration/test_write_plexus_multiple.py new file mode 100644 index 0000000..7094cb0 --- /dev/null +++ b/tests/integration/test_write_plexus_multiple.py @@ -0,0 +1,228 @@ +import asyncio +import os +import random +import uuid +from platform import python_version as pv +from random import randint +from typing import List + +import pytest +from plexo.plexus import Plexus +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) + +from podping_hivewriter.constants import LIVETEST_OPERATION_ID +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain +from podping_hivewriter.models.hive_operation_id import HiveOperationId +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, +) +from podping_hivewriter.podping_hivewriter import PodpingHivewriter +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_plexus_multiple_external(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + num_iris = randint(2, 25) + test_name = "plexus_multiple" + python_version = pv() + test_iris = { + f"https://example.com?t={test_name}&i={i}&v={python_version}&s={session_uuid_str}" + for i in range(num_iris) + } + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) + default_hive_operation_id_str = str(default_hive_operation_id) + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + plexus = Plexus() + + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + 
reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + plexus=plexus, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + op_period = settings_manager._settings.hive_operation_period + + for iri in test_iris: + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + await plexus.transmit(podping_write) + + # Sleep until all items in the queue are done processing + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + while num_iris_processing > 0: + await asyncio.sleep(op_period) + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + + txs: List[PodpingHiveTransaction] = [] + while sum(len(podping.iris) for tx in txs for podping in tx.podpings) < len( + test_iris + ): + txs.append(await tx_queue.get()) + await asyncio.sleep(op_period / 2) + + assert test_iris == set( + iri for tx in txs for podping in tx.podpings for iri in podping.iris + ) + start_block = min(tx.hiveBlockNum for tx in txs) + + answer_iris = set() + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, start_block, default_hive_operation_id_str + ): + for podping in tx.podpings: + assert podping.medium == medium + assert podping.reason == reason + + for iri in podping.iris: + if iri.endswith(session_uuid_str): + answer_iris.add(iri) + + if len(test_iris) == len(answer_iris): + break + + assert test_iris == answer_iris + + plexus.close() + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_plexus_internal(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + num_iris = randint(2, 25) + test_name = "plexus_multiple" + python_version = pv() + test_iris = { + f"https://example.com?t={test_name}&i={i}&v={python_version}&s={session_uuid_str}" + for i in range(num_iris) + } + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) + default_hive_operation_id_str = str(default_hive_operation_id) + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + await podping_hivewriter.plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + + op_period = settings_manager._settings.hive_operation_period + + for iri in test_iris: + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + await podping_hivewriter.plexus.transmit(podping_write) + + # Sleep until all items in the queue are done processing + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + while num_iris_processing > 0: + await asyncio.sleep(op_period) + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + + txs: 
List[PodpingHiveTransaction] = [] + while sum(len(podping.iris) for tx in txs for podping in tx.podpings) < len( + test_iris + ): + txs.append(await tx_queue.get()) + await asyncio.sleep(op_period / 2) + + assert test_iris == set( + iri for tx in txs for podping in tx.podpings for iri in podping.iris + ) + start_block = min(tx.hiveBlockNum for tx in txs) + + answer_iris = set() + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, start_block, default_hive_operation_id_str + ): + for podping in tx.podpings: + assert podping.medium == medium + assert podping.reason == reason + + for iri in podping.iris: + if iri.endswith(session_uuid_str): + answer_iris.add(iri) + + if len(test_iris) == len(answer_iris): + break + + assert test_iris == answer_iris diff --git a/tests/integration/test_write_plexus_single.py b/tests/integration/test_write_plexus_single.py new file mode 100644 index 0000000..b7c3fb9 --- /dev/null +++ b/tests/integration/test_write_plexus_single.py @@ -0,0 +1,189 @@ +import asyncio +import os +import random +import uuid +from platform import python_version as pv + +import pytest +from plexo.plexus import Plexus +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) + +from podping_hivewriter.constants import LIVETEST_OPERATION_ID +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain +from podping_hivewriter.models.hive_operation_id import HiveOperationId +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, +) +from podping_hivewriter.podping_hivewriter import PodpingHivewriter +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_plexus_single_external(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + test_name = "plexus_single" + iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) + default_hive_operation_id_str = str(default_hive_operation_id) + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + plexus = Plexus() + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + plexus=plexus, + ) as 
podping_hivewriter: + await podping_hivewriter.wait_startup() + + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + + await plexus.transmit(podping_write) + + iri_found = False + + tx = await tx_queue.get() + + assert len(tx.podpings) == 1 + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + assert iri in tx.podpings[0].iris + assert tx.hiveTxId is not None + assert tx.hiveBlockNum is not None + + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, tx.hiveBlockNum, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: + iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + break + + assert iri_found + + plexus.close() + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_plexus_single_internal(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + test_name = "plexus_single" + iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) + default_hive_operation_id_str = str(default_hive_operation_id) + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + await podping_hivewriter.plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + + await podping_hivewriter.plexus.transmit(podping_write) + + iri_found = False + + tx = await tx_queue.get() + + assert len(tx.podpings) == 1 + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + assert iri in tx.podpings[0].iris + assert tx.hiveTxId is not None + assert tx.hiveBlockNum is not None + + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, tx.hiveBlockNum, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: + iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + break + + assert iri_found diff --git a/tests/integration/test_write_send_podping_multiple.py b/tests/integration/test_write_send_podping_multiple.py new file mode 100644 index 0000000..3c1dbc6 --- /dev/null +++ b/tests/integration/test_write_send_podping_multiple.py @@ -0,0 +1,122 @@ +import asyncio +import os +import random +import uuid +from platform import python_version as pv +from random import randint +from typing import List + +import pytest +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) + +from 
podping_hivewriter.constants import LIVETEST_OPERATION_ID +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain +from podping_hivewriter.models.hive_operation_id import HiveOperationId +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, +) +from podping_hivewriter.podping_hivewriter import PodpingHivewriter +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_send_podping_multiple(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + num_iris = randint(2, 25) + test_name = "send_podping_multiple" + python_version = pv() + test_iris = { + f"https://example.com?t={test_name}&i={i}&v={python_version}&s={session_uuid_str}" + for i in range(num_iris) + } + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) + default_hive_operation_id_str = str(default_hive_operation_id) + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + await podping_hivewriter.plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + + op_period = settings_manager._settings.hive_operation_period + + for iri in test_iris: + await podping_hivewriter.send_podping(medium=medium, reason=reason, iri=iri) + + # Sleep until all items in the queue are done processing + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + while num_iris_processing > 0: + await asyncio.sleep(op_period) + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + + txs: List[PodpingHiveTransaction] = [] + while sum(len(podping.iris) for tx in txs for podping in tx.podpings) < len( + test_iris + ): + txs.append(await tx_queue.get()) + await asyncio.sleep(op_period / 2) + + assert test_iris == set( + iri for tx in txs for podping in tx.podpings for iri in podping.iris + ) + start_block = min(tx.hiveBlockNum for tx in txs) + + answer_iris = set() + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, start_block, default_hive_operation_id_str + ): + for podping in tx.podpings: + assert podping.medium == medium + assert podping.reason == reason + + for iri in podping.iris: + if iri.endswith(session_uuid_str): + answer_iris.add(iri) + + if len(test_iris) == len(answer_iris): + break + + assert test_iris == answer_iris diff --git a/tests/integration/test_write_send_podping_single.py b/tests/integration/test_write_send_podping_single.py new file 
mode 100644 index 0000000..be85375 --- /dev/null +++ b/tests/integration/test_write_send_podping_single.py @@ -0,0 +1,99 @@ +import asyncio +import os +import random +import uuid +from platform import python_version as pv + +import pytest +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) + +from podping_hivewriter.constants import LIVETEST_OPERATION_ID +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain +from podping_hivewriter.models.hive_operation_id import HiveOperationId +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import podping_hive_transaction_neuron +from podping_hivewriter.podping_hivewriter import PodpingHivewriter +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) + + +@pytest.mark.asyncio +@pytest.mark.timeout(600) +@pytest.mark.slow +async def test_write_send_podping_single(lighthive_client): + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + test_name = "send_podping_single" + iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" + + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] + + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) + default_hive_operation_id_str = str(default_hive_operation_id) + + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + medium=medium, + reason=reason, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + zmq_service=False, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + await podping_hivewriter.plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + + await podping_hivewriter.send_podping(medium=medium, reason=reason, iri=iri) + + iri_found = False + + tx = await tx_queue.get() + + assert len(tx.podpings) == 1 + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + assert iri in tx.podpings[0].iris + assert tx.hiveTxId is not None + assert tx.hiveBlockNum is not None + + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, tx.hiveBlockNum, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: + iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + break + + assert iri_found diff --git a/tests/integration/test_write_zmq_multiple.py b/tests/integration/test_write_zmq_multiple.py index a3c079a..47d719b 100644 --- a/tests/integration/test_write_zmq_multiple.py +++ b/tests/integration/test_write_zmq_multiple.py @@ -1,32 +1,41 @@ import asyncio -import json import os import random import uuid +from ipaddress import IPv4Address from platform import 
python_version as pv from random import randint +from typing import List import pytest -import zmq -import zmq.asyncio +from plexo.ganglion.tcp_pair import GanglionZmqTcpPair +from plexo.plexus import Plexus from podping_hivewriter.constants import LIVETEST_OPERATION_ID -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map +from podping_hivewriter.models.medium import medium_strings, str_medium_map +from podping_hivewriter.models.reason import reason_strings, str_reason_map +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, +) from podping_hivewriter.podping_hivewriter import PodpingHivewriter from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) @pytest.mark.asyncio @pytest.mark.timeout(600) @pytest.mark.slow -async def test_write_zmq_multiple(event_loop): +async def test_write_zmq_multiple(lighthive_client): settings_manager = PodpingSettingsManager(ignore_updates=True) - client = get_client() - session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -38,25 +47,22 @@ async def test_write_zmq_multiple(event_loop): for i in range(num_iris) } - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium = str_medium_map[random.sample(sorted(medium_strings), 1)[0]] + reason = str_reason_map[random.sample(sorted(reason_strings), 1)[0]] default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data: - for iri in data["iris"]: - # Only look for IRIs from current session - if iri.endswith(session_uuid_str): - yield iri + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) host = "127.0.0.1" port = 9979 - podping_hivewriter = PodpingHivewriter( + with PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], [os.environ["PODPING_HIVE_POSTING_KEY"]], settings_manager, @@ -65,35 +71,64 @@ async def get_iri_from_blockchain(start_block: int): listen_ip=host, listen_port=port, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, - ) - await podping_hivewriter.wait_startup() - context = zmq.asyncio.Context() - socket = context.socket(zmq.REQ, io_loop=event_loop) - socket.connect(f"tcp://{host}:{port}") - - op_period = settings_manager._settings.hive_operation_period - - current_block = client.get_dynamic_global_properties()["head_block_number"] - - for iri in test_iris: - await socket.send_string(iri) - response = await socket.recv_string() - assert response == "OK" - - # Sleep until all items in the queue are done processing - 
num_iris_processing = await podping_hivewriter.num_operations_in_queue() - while num_iris_processing > 0: - await asyncio.sleep(op_period) + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + tcp_pair_ganglion = GanglionZmqTcpPair( + peer=(IPv4Address(host), port), + relevant_neurons=( + podping_hive_transaction_neuron, + podping_write_neuron, + ), + ) + plexus = Plexus(ganglia=(tcp_pair_ganglion,)) + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) + + op_period = settings_manager._settings.hive_operation_period + + for iri in test_iris: + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + await plexus.transmit(podping_write) + + # Sleep until all items in the queue are done processing num_iris_processing = await podping_hivewriter.num_operations_in_queue() - - answer_iris = set() - async for stream_iri in get_iri_from_blockchain(current_block): - answer_iris.add(stream_iri) - - # If we're done, end early - if len(answer_iris) == len(test_iris): - break - - assert answer_iris == test_iris - podping_hivewriter.close() + while num_iris_processing > 0: + await asyncio.sleep(op_period) + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + + txs: List[PodpingHiveTransaction] = [] + while sum(len(podping.iris) for tx in txs for podping in tx.podpings) < len( + test_iris + ): + txs.append(await tx_queue.get()) + await asyncio.sleep(op_period / 2) + + assert test_iris == set( + iri for tx in txs for podping in tx.podpings for iri in podping.iris + ) + start_block = min(tx.hiveBlockNum for tx in txs) + + answer_iris = set() + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, start_block, default_hive_operation_id_str + ): + for podping in tx.podpings: + assert podping.medium == medium + assert podping.reason == reason + + for iri in podping.iris: + if iri.endswith(session_uuid_str): + answer_iris.add(iri) + + if len(test_iris) == len(answer_iris): + break + + assert test_iris == answer_iris + + plexus.close() diff --git a/tests/integration/test_write_zmq_simulcast.py b/tests/integration/test_write_zmq_simulcast.py new file mode 100644 index 0000000..aa4bdbc --- /dev/null +++ b/tests/integration/test_write_zmq_simulcast.py @@ -0,0 +1,129 @@ +import asyncio +import os +import random +import uuid +from ipaddress import IPv4Address +from platform import python_version as pv +from typing import List + +import pytest +from plexo.ganglion.tcp_pair import GanglionZmqTcpPair +from plexo.plexus import Plexus +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) + +from podping_hivewriter.constants import LIVETEST_OPERATION_ID +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, +) +from podping_hivewriter.podping_hivewriter import PodpingHivewriter +from podping_hivewriter.podping_settings_manager import PodpingSettingsManager + + +@pytest.mark.asyncio +@pytest.mark.timeout(900) +@pytest.mark.slow +async def test_write_zmq_simulcast(lighthive_client): + """This test forces 7 separate posts to ensure we retry after exceeding the + 
limit of posts per block (5)""" + settings_manager = PodpingSettingsManager(ignore_updates=True) + + session_uuid = uuid.uuid4() + session_uuid_str = str(session_uuid) + + # Ensure hive env vars are set from .env.test file or this will fail + python_version = pv() + test_iris = { + ( + f"https://example.com?t=zmq_simulcast_{n}" + f"&v={python_version}&s={session_uuid_str}", + random.sample(sorted(mediums), 1)[0], + random.sample(sorted(reasons), 1)[0], + ) + for n in range(7) + } + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) + + host = "127.0.0.1" + port = 9979 + with PodpingHivewriter( + os.environ["PODPING_HIVE_ACCOUNT"], + [os.environ["PODPING_HIVE_POSTING_KEY"]], + settings_manager, + listen_ip=host, + listen_port=port, + resource_test=False, + status=False, + operation_id=LIVETEST_OPERATION_ID, + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + tcp_pair_ganglion = GanglionZmqTcpPair( + peer=(IPv4Address(host), port), + relevant_neurons=( + podping_hive_transaction_neuron, + podping_write_neuron, + ), + ) + plexus = Plexus(ganglia=(tcp_pair_ganglion,)) + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) + + op_period = settings_manager._settings.hive_operation_period + + for iri, medium, reason in test_iris: + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + await plexus.transmit(podping_write) + + # Sleep until all items in the queue are done processing + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + while num_iris_processing > 0: + await asyncio.sleep(op_period) + num_iris_processing = await podping_hivewriter.num_operations_in_queue() + + txs: List[PodpingHiveTransaction] = [] + while sum(len(podping.iris) for tx in txs for podping in tx.podpings) < len( + test_iris + ): + txs.append(await tx_queue.get()) + await asyncio.sleep(op_period / 2) + + assert test_iris == set( + (iri, podping.medium, podping.reason) + for tx in txs + for podping in tx.podpings + for iri in podping.iris + ) + start_block = min(tx.hiveBlockNum for tx in txs) + + answer_iris = set() + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, start_block + ): + for podping in tx.podpings: + for iri in podping.iris: + if iri.endswith(session_uuid_str): + answer_iris.add((iri, podping.medium, podping.reason)) + + if len(test_iris) == len(answer_iris): + break + + assert test_iris == answer_iris + + plexus.close() diff --git a/tests/integration/test_write_zmq_single.py b/tests/integration/test_write_zmq_single.py index 56d6c7b..5a81e48 100644 --- a/tests/integration/test_write_zmq_single.py +++ b/tests/integration/test_write_zmq_single.py @@ -1,55 +1,61 @@ -import json +import asyncio import os import random import uuid +from ipaddress import IPv4Address from platform import python_version as pv import pytest -import zmq -import zmq.asyncio +from plexo.ganglion.tcp_pair import GanglionZmqTcpPair +from plexo.plexus import Plexus from podping_hivewriter.constants import LIVETEST_OPERATION_ID -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import 
mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map +from podping_hivewriter.models.medium import medium_strings, str_medium_map +from podping_hivewriter.models.reason import reason_strings, str_reason_map +from podping_hivewriter.neuron import ( + podping_hive_transaction_neuron, + podping_write_neuron, +) from podping_hivewriter.podping_hivewriter import PodpingHivewriter from podping_hivewriter.podping_settings_manager import PodpingSettingsManager +from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import ( + PodpingHiveTransaction, +) +from podping_schemas.org.podcastindex.podping.podping_write import ( + PodpingWrite, +) @pytest.mark.asyncio @pytest.mark.timeout(600) @pytest.mark.slow -async def test_write_zmq_single(event_loop): +async def test_write_zmq_single(lighthive_client): settings_manager = PodpingSettingsManager(ignore_updates=True) - client = get_client() - session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) test_name = "zmq_single" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium = str_medium_map[random.sample(sorted(medium_strings), 1)[0]] + reason = str_reason_map[random.sample(sorted(reason_strings), 1)[0]] default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data and len(data["iris"]) == 1: - iri = data["iris"][0] - # Only look for IRIs from current session - if iri.endswith(session_uuid_str): - yield iri + tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue() + + async def _podping_hive_transaction_reaction( + transaction: PodpingHiveTransaction, _, _2 + ): + await tx_queue.put(transaction) host = "127.0.0.1" port = 9979 - podping_hivewriter = PodpingHivewriter( + with PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], [os.environ["PODPING_HIVE_POSTING_KEY"]], settings_manager, @@ -58,25 +64,51 @@ async def get_iri_from_blockchain(start_block: int): listen_ip=host, listen_port=port, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, - ) - await podping_hivewriter.wait_startup() - context = zmq.asyncio.Context() - socket = context.socket(zmq.REQ, io_loop=event_loop) - socket.connect(f"tcp://{host}:{port}") - current_block = client.get_dynamic_global_properties()["head_block_number"] - - await socket.send_string(iri) - response = await socket.recv_string() - - assert response == "OK" - - iri_found = False - - async for stream_iri in get_iri_from_blockchain(current_block): - if stream_iri == iri: - iri_found = True - break + ) as podping_hivewriter: + await podping_hivewriter.wait_startup() + + tcp_pair_ganglion = GanglionZmqTcpPair( + peer=(IPv4Address(host), port), + relevant_neurons=( + podping_hive_transaction_neuron, + podping_write_neuron, + ), + ) + plexus = Plexus(ganglia=(tcp_pair_ganglion,)) + await plexus.adapt( + podping_hive_transaction_neuron, + reactants=(_podping_hive_transaction_reaction,), + ) + await plexus.adapt(podping_write_neuron) + + podping_write = PodpingWrite(medium=medium, reason=reason, iri=iri) + + await 
plexus.transmit(podping_write) + + iri_found = False + + tx = await tx_queue.get() + + assert len(tx.podpings) == 1 + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + assert iri in tx.podpings[0].iris + assert tx.hiveTxId is not None + assert tx.hiveBlockNum is not None + + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, tx.hiveBlockNum, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: + iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason + break assert iri_found - podping_hivewriter.close() + + plexus.close() diff --git a/tests/regression/test_#48_send_notification_raises_rpcexception.py b/tests/regression/test_#48_broadcast_iri_raises_rpcexception.py similarity index 66% rename from tests/regression/test_#48_send_notification_raises_rpcexception.py rename to tests/regression/test_#48_broadcast_iri_raises_rpcexception.py index ecbf1a0..67327d1 100644 --- a/tests/regression/test_#48_send_notification_raises_rpcexception.py +++ b/tests/regression/test_#48_broadcast_iri_raises_rpcexception.py @@ -7,22 +7,26 @@ import pytest from lighthive.client import Client from lighthive.exceptions import RPCNodeException +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from podping_hivewriter.constants import LIVETEST_OPERATION_ID from podping_hivewriter.exceptions import ( NotEnoughResourceCredits, TooManyCustomJsonsPerBlock, ) -from podping_hivewriter.models.medium import mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons from podping_hivewriter.podping_hivewriter import PodpingHivewriter from podping_hivewriter.podping_settings_manager import PodpingSettingsManager @pytest.mark.asyncio -async def test_send_notification_raises_rpcexception_invalid_body( - event_loop, monkeypatch -): +async def test_broadcast_iri_raises_rpcexception_invalid_body(monkeypatch): settings_manager = PodpingSettingsManager(ignore_updates=True) def mock_broadcast(*args, **kwargs): @@ -30,7 +34,7 @@ def mock_broadcast(*args, **kwargs): "mock_broadcast exception", code=42, raw_body={"foo": "bar"} ) - monkeypatch.setattr(lighthive.client.Client, "broadcast", mock_broadcast) + monkeypatch.setattr(lighthive.client.Client, "broadcast_sync", mock_broadcast) session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -38,8 +42,8 @@ def mock_broadcast(*args, **kwargs): test_name = "test_send_notification_raises_rpcexception_invalid_body" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] podping_hivewriter = PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], @@ -47,26 +51,25 @@ def mock_broadcast(*args, **kwargs): settings_manager, medium=medium, reason=reason, - daemon=False, + zmq_service=False, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, ) await podping_hivewriter.wait_startup() with pytest.raises(RPCNodeException): - await podping_hivewriter.send_notification_iri(iri, medium, 
reason) + await podping_hivewriter.broadcast_iri(iri, medium, reason) with pytest.raises(RPCNodeException): - await podping_hivewriter.send_notification_iris({iri}, medium, reason) + await podping_hivewriter.broadcast_iris({iri}, medium, reason) podping_hivewriter.close() @pytest.mark.asyncio -async def test_send_notification_raises_rpcexception_valid_body( - event_loop, monkeypatch -): +async def test_broadcast_iri_raises_rpcexception_valid_body(monkeypatch): settings_manager = PodpingSettingsManager(ignore_updates=True) def mock_broadcast(*args, **kwargs): @@ -76,7 +79,7 @@ def mock_broadcast(*args, **kwargs): raw_body={"error": {"message": "nonsense"}}, ) - monkeypatch.setattr(lighthive.client.Client, "broadcast", mock_broadcast) + monkeypatch.setattr(lighthive.client.Client, "broadcast_sync", mock_broadcast) session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -84,8 +87,8 @@ def mock_broadcast(*args, **kwargs): test_name = "test_send_notification_raises_rpcexception_valid_body" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] podping_hivewriter = PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], @@ -93,26 +96,25 @@ def mock_broadcast(*args, **kwargs): settings_manager, medium=medium, reason=reason, - daemon=False, + zmq_service=False, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, ) await podping_hivewriter.wait_startup() with pytest.raises(RPCNodeException): - await podping_hivewriter.send_notification_iri(iri, medium, reason) + await podping_hivewriter.broadcast_iri(iri, medium, reason) with pytest.raises(RPCNodeException): - await podping_hivewriter.send_notification_iris({iri}, medium, reason) + await podping_hivewriter.broadcast_iris({iri}, medium, reason) podping_hivewriter.close() @pytest.mark.asyncio -async def test_send_notification_raises_too_many_custom_jsons_per_block( - event_loop, monkeypatch -): +async def test_broadcast_iri_raises_too_many_custom_jsons_per_block(monkeypatch): settings_manager = PodpingSettingsManager(ignore_updates=True) def mock_broadcast(*args, **kwargs): @@ -122,7 +124,7 @@ def mock_broadcast(*args, **kwargs): raw_body={"error": {"message": "plugin exception foobar custom json bizz"}}, ) - monkeypatch.setattr(lighthive.client.Client, "broadcast", mock_broadcast) + monkeypatch.setattr(lighthive.client.Client, "broadcast_sync", mock_broadcast) session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -130,8 +132,8 @@ def mock_broadcast(*args, **kwargs): test_name = "test_send_notification_raises_too_many_custom_jsons_per_block" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] podping_hivewriter = PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], @@ -139,26 +141,25 @@ def mock_broadcast(*args, **kwargs): settings_manager, medium=medium, reason=reason, - daemon=False, + zmq_service=False, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, ) await podping_hivewriter.wait_startup() with pytest.raises(TooManyCustomJsonsPerBlock): 
- await podping_hivewriter.send_notification_iri(iri, medium, reason) + await podping_hivewriter.broadcast_iri(iri, medium, reason) with pytest.raises(TooManyCustomJsonsPerBlock): - await podping_hivewriter.send_notification_iris({iri}, medium, reason) + await podping_hivewriter.broadcast_iris({iri}, medium, reason) podping_hivewriter.close() @pytest.mark.asyncio -async def test_send_notification_raises_not_enough_resource_credits( - event_loop, monkeypatch -): +async def test_broadcast_iri_raises_not_enough_resource_credits(monkeypatch): settings_manager = PodpingSettingsManager(ignore_updates=True) def mock_broadcast(*args, **kwargs): @@ -168,7 +169,7 @@ def mock_broadcast(*args, **kwargs): raw_body={"error": {"message": "payer has not enough RC mana bizz"}}, ) - monkeypatch.setattr(lighthive.client.Client, "broadcast", mock_broadcast) + monkeypatch.setattr(lighthive.client.Client, "broadcast_sync", mock_broadcast) session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -176,8 +177,8 @@ def mock_broadcast(*args, **kwargs): test_name = "test_send_notification_raises_not_enough_resource_credits" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] podping_hivewriter = PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], @@ -185,17 +186,18 @@ def mock_broadcast(*args, **kwargs): settings_manager, medium=medium, reason=reason, - daemon=False, + zmq_service=False, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, ) await podping_hivewriter.wait_startup() with pytest.raises(NotEnoughResourceCredits): - await podping_hivewriter.send_notification_iri(iri, medium, reason) + await podping_hivewriter.broadcast_iri(iri, medium, reason) with pytest.raises(NotEnoughResourceCredits): - await podping_hivewriter.send_notification_iris({iri}, medium, reason) + await podping_hivewriter.broadcast_iris({iri}, medium, reason) podping_hivewriter.close() diff --git a/tests/regression/test_#51_failure_retry_handles_invalid_error_response.py b/tests/regression/test_#51_broadcast_iris_retry_handles_invalid_error_response.py similarity index 72% rename from tests/regression/test_#51_failure_retry_handles_invalid_error_response.py rename to tests/regression/test_#51_broadcast_iris_retry_handles_invalid_error_response.py index 4c280a0..7f6c960 100644 --- a/tests/regression/test_#51_failure_retry_handles_invalid_error_response.py +++ b/tests/regression/test_#51_broadcast_iris_retry_handles_invalid_error_response.py @@ -6,20 +6,24 @@ import lighthive import pytest from lighthive.exceptions import RPCNodeException +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from podping_hivewriter import podping_hivewriter from podping_hivewriter.constants import LIVETEST_OPERATION_ID from podping_hivewriter.exceptions import NotEnoughResourceCredits -from podping_hivewriter.models.medium import mediums, str_medium_map -from podping_hivewriter.models.reason import reasons, str_reason_map +from podping_hivewriter.models.medium import mediums +from podping_hivewriter.models.reason import reasons from podping_hivewriter.podping_hivewriter import PodpingHivewriter from 
podping_hivewriter.podping_settings_manager import PodpingSettingsManager @pytest.mark.asyncio -async def test_failure_retry_handles_invalid_error_response( - event_loop, mocker, monkeypatch -): +async def test_broadcast_iris_retry_handles_invalid_error_response(mocker, monkeypatch): settings_manager = PodpingSettingsManager(ignore_updates=True) logging_warning_stub = mocker.stub(name="logging_warning_stub") @@ -32,7 +36,7 @@ def mock_broadcast(*args, **kwargs): monkeypatch.setattr(podping_hivewriter.logging, "warning", logging_warning_stub) monkeypatch.setattr(podping_hivewriter.logging, "error", logging_error_stub) - monkeypatch.setattr(lighthive.client.Client, "broadcast", mock_broadcast) + monkeypatch.setattr(lighthive.client.Client, "broadcast_sync", mock_broadcast) session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -40,8 +44,8 @@ def mock_broadcast(*args, **kwargs): test_name = "failure_retry_handles_invalid_error_response" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] writer = PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], @@ -49,8 +53,9 @@ def mock_broadcast(*args, **kwargs): settings_manager, medium=medium, reason=reason, - daemon=False, + zmq_service=False, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, ) @@ -59,18 +64,19 @@ def mock_broadcast(*args, **kwargs): mocker.patch.object(podping_hivewriter.itertools, "repeat", return_value=range(1)) logging_warning_stub.reset_mock() - failure_count = await writer.failure_retry({iri}, medium, reason) + failure_count, response = await writer.broadcast_iris_retry({iri}, medium, reason) writer.close() assert logging_warning_stub.call_count == 2 assert logging_error_stub.call_count == 4 assert failure_count == 1 + assert response is None @pytest.mark.asyncio -async def test_failure_retry_handles_not_enough_resource_credits( - event_loop, mocker, monkeypatch +async def test_broadcast_iris_retry_handles_not_enough_resource_credits( + mocker, monkeypatch ): settings_manager = PodpingSettingsManager(ignore_updates=True) @@ -82,7 +88,7 @@ def mock_broadcast(*args, **kwargs): monkeypatch.setattr(podping_hivewriter.logging, "warning", logging_warning_stub) monkeypatch.setattr(podping_hivewriter.logging, "error", logging_error_stub) - monkeypatch.setattr(lighthive.client.Client, "broadcast", mock_broadcast) + monkeypatch.setattr(lighthive.client.Client, "broadcast_sync", mock_broadcast) session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) @@ -90,8 +96,8 @@ def mock_broadcast(*args, **kwargs): test_name = "failure_retry_handles_not_enough_resource_credits" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - medium = str_medium_map[random.sample(sorted(mediums), 1)[0]] - reason = str_reason_map[random.sample(sorted(reasons), 1)[0]] + medium: PodpingMedium = random.sample(sorted(mediums), 1)[0] + reason: PodpingReason = random.sample(sorted(reasons), 1)[0] writer = PodpingHivewriter( os.environ["PODPING_HIVE_ACCOUNT"], @@ -99,8 +105,9 @@ def mock_broadcast(*args, **kwargs): settings_manager, medium=medium, reason=reason, - daemon=False, + zmq_service=False, resource_test=False, + status=False, operation_id=LIVETEST_OPERATION_ID, ) @@ -109,7 +116,7 @@ def mock_broadcast(*args, **kwargs): 
mocker.patch.object(podping_hivewriter.itertools, "repeat", return_value=range(1)) logging_warning_stub.reset_mock() - failure_count = await writer.failure_retry({iri}, medium, reason) + failure_count, response = await writer.broadcast_iris_retry({iri}, medium, reason) writer.close() @@ -117,3 +124,4 @@ def mock_broadcast(*args, **kwargs): assert logging_warning_stub.call_count == 2 assert logging_error_stub.call_count == 1 assert failure_count == 1 + assert response is None diff --git a/tests/regression/test_#54_override_hive_operation_period_command_line.py b/tests/regression/test_#54_override_hive_operation_period_command_line.py index ab0f225..e754f2d 100644 --- a/tests/regression/test_#54_override_hive_operation_period_command_line.py +++ b/tests/regression/test_#54_override_hive_operation_period_command_line.py @@ -1,58 +1,60 @@ -import json import uuid from platform import python_version as pv import pytest +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from typer.testing import CliRunner from podping_hivewriter.cli.podping import app from podping_hivewriter.constants import LIVETEST_OPERATION_ID -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from podping_hivewriter.hive import get_relevant_transactions_from_blockchain from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import Medium -from podping_hivewriter.models.reason import Reason @pytest.mark.asyncio @pytest.mark.timeout(600) @pytest.mark.slow -async def test_startup_checks_and_write_cli_single(): +async def test_startup_checks_and_write_cli_single(lighthive_client): runner = CliRunner() - client = get_client() - session_uuid = uuid.uuid4() session_uuid_str = str(session_uuid) test_name = "cli_single" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - default_hive_operation_id = HiveOperationId( - LIVETEST_OPERATION_ID, Medium.podcast, Reason.update - ) + medium = PodpingMedium.podcast + reason = PodpingReason.update + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data and len(data["iris"]) == 1: - yield data["iris"][0] - args = ["--livetest", "--hive-operation-period", "30", "write", iri] - current_block = client.get_dynamic_global_properties()["head_block_number"] + current_block = lighthive_client.get_dynamic_global_properties()[ + "head_block_number" + ] - # Ensure hive env vars are set from .env.test file or this will fail + # Ensure hive env vars are set correctly or this will fail result = runner.invoke(app, args) assert result.exit_code == 0 iri_found = False - async for stream_iri in get_iri_from_blockchain(current_block): - if stream_iri == iri: + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, current_block, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason break assert iri_found diff --git a/tests/regression/test_#56_set_env_to_use_testnet.py 
b/tests/regression/test_#56_set_env_to_use_testnet.py index 187b2f7..4fdab33 100644 --- a/tests/regression/test_#56_set_env_to_use_testnet.py +++ b/tests/regression/test_#56_set_env_to_use_testnet.py @@ -1,18 +1,23 @@ -import json import os import uuid from platform import python_version as pv import pytest +from podping_schemas.org.podcastindex.podping.podping_medium import ( + PodpingMedium, +) +from podping_schemas.org.podcastindex.podping.podping_reason import ( + PodpingReason, +) from typer.testing import CliRunner from podping_hivewriter.cli.podping import app from podping_hivewriter.constants import LIVETEST_OPERATION_ID -from podping_hivewriter.hive import get_client, listen_for_custom_json_operations +from podping_hivewriter.hive import ( + get_client, + get_relevant_transactions_from_blockchain, +) from podping_hivewriter.models.hive_operation_id import HiveOperationId -from podping_hivewriter.models.medium import Medium -from podping_hivewriter.models.reason import Reason -from podping_hivewriter.podping_settings_manager import PodpingSettingsManager @pytest.mark.asyncio @@ -28,9 +33,9 @@ async def test_use_testnet_startup_checks_and_write_cli_single(): "PODPING_TESTNET_CHAINID" ] = "4200000000000000000000000000000000000000000000000000000000000000" - client = get_client() + lighthive_client = get_client() try: - props = client.get_dynamic_global_properties() + props = lighthive_client.get_dynamic_global_properties() except Exception: # If we can't connect to the fakenet / testnet then just pass the test assert True @@ -44,21 +49,16 @@ async def test_use_testnet_startup_checks_and_write_cli_single(): test_name = "use_testnet_startup_checks_and_write_cli_single" iri = f"https://example.com?t={test_name}&v={pv()}&s={session_uuid_str}" - default_hive_operation_id = HiveOperationId( - LIVETEST_OPERATION_ID, Medium.podcast, Reason.update - ) + medium = PodpingMedium.podcast + reason = PodpingReason.update + default_hive_operation_id = HiveOperationId(LIVETEST_OPERATION_ID, medium, reason) default_hive_operation_id_str = str(default_hive_operation_id) - async def get_iri_from_blockchain(start_block: int): - async for post in listen_for_custom_json_operations(client, start_block): - if post["op"][1]["id"] == default_hive_operation_id_str: - data = json.loads(post["op"][1]["json"]) - if "iris" in data and len(data["iris"]) == 1: - yield data["iris"][0] - args = ["--livetest", "--hive-operation-period", "30", "write", iri] - current_block = client.get_dynamic_global_properties()["head_block_number"] + current_block = lighthive_client.get_dynamic_global_properties()[ + "head_block_number" + ] # Ensure hive env vars are set from .env.test file or this will fail result = runner.invoke(app, args) @@ -67,9 +67,15 @@ async def get_iri_from_blockchain(start_block: int): iri_found = False - async for stream_iri in get_iri_from_blockchain(current_block): - if stream_iri == iri: + async for tx in get_relevant_transactions_from_blockchain( + lighthive_client, current_block, default_hive_operation_id_str + ): + assert len(tx.podpings) == 1 + + if iri in tx.podpings[0].iris: iri_found = True + assert tx.podpings[0].medium == medium + assert tx.podpings[0].reason == reason break assert iri_found
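Taken together, the migrated tests in this diff all share one transport pattern: adapt the podping_hive_transaction_neuron on a Plexus with a reactant that feeds an asyncio.Queue, transmit a PodpingWrite, then await the resulting PodpingHiveTransaction. Below is a minimal sketch of that round trip, condensed from the tests above; it assumes the suite's usual .env.test environment variables, uses only calls that appear in this changeset, and the helper name send_one_podping is purely illustrative.

```python
# A minimal sketch of the plexus round-trip pattern used by these tests,
# assuming the same env vars (.env.test) and modules the suite imports.
import asyncio
import os

from plexo.plexus import Plexus
from podping_schemas.org.podcastindex.podping.hivewriter.podping_hive_transaction import (
    PodpingHiveTransaction,
)
from podping_schemas.org.podcastindex.podping.podping_medium import PodpingMedium
from podping_schemas.org.podcastindex.podping.podping_reason import PodpingReason
from podping_schemas.org.podcastindex.podping.podping_write import PodpingWrite

from podping_hivewriter.constants import LIVETEST_OPERATION_ID
from podping_hivewriter.neuron import (
    podping_hive_transaction_neuron,
    podping_write_neuron,
)
from podping_hivewriter.podping_hivewriter import PodpingHivewriter
from podping_hivewriter.podping_settings_manager import PodpingSettingsManager


async def send_one_podping(iri: str) -> PodpingHiveTransaction:
    # Reactions on the transaction neuron fire after the writer broadcasts,
    # so a queue bridges the callback back into the awaiting coroutine.
    tx_queue: asyncio.Queue[PodpingHiveTransaction] = asyncio.Queue()

    async def _transaction_reaction(transaction: PodpingHiveTransaction, _, _2):
        await tx_queue.put(transaction)

    plexus = Plexus()
    await plexus.adapt(
        podping_hive_transaction_neuron, reactants=(_transaction_reaction,)
    )
    await plexus.adapt(podping_write_neuron)

    with PodpingHivewriter(
        os.environ["PODPING_HIVE_ACCOUNT"],
        [os.environ["PODPING_HIVE_POSTING_KEY"]],
        PodpingSettingsManager(ignore_updates=True),
        listen_ip="127.0.0.1",
        listen_port=9979,
        resource_test=False,
        status=False,
        operation_id=LIVETEST_OPERATION_ID,
        zmq_service=False,
        plexus=plexus,
    ) as podping_hivewriter:
        await podping_hivewriter.wait_startup()

        # Transmitting a PodpingWrite on the shared plexus queues the IRI;
        # the writer batches it into a custom_json and reports the result.
        await plexus.transmit(
            PodpingWrite(
                medium=PodpingMedium.podcast,
                reason=PodpingReason.update,
                iri=iri,
            )
        )
        transaction = await tx_queue.get()

    plexus.close()
    return transaction
```

On-chain verification then proceeds as in the single-IRI tests: stream from transaction.hiveBlockNum with get_relevant_transactions_from_blockchain and match IRIs ending in the current session's UUID.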