diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 22052c3..5516039 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -12,4 +12,4 @@ on: jobs: call-changelog-check-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.12.0 diff --git a/.github/workflows/create-jira-issue.yml b/.github/workflows/create-jira-issue.yml index 99489d5..d95ef84 100644 --- a/.github/workflows/create-jira-issue.yml +++ b/.github/workflows/create-jira-issue.yml @@ -6,7 +6,7 @@ on: jobs: call-create-jira-issue-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.12.0 secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/distribute.yml b/.github/workflows/distribute.yml index fe9485c..7c2d74a 100644 --- a/.github/workflows/distribute.yml +++ b/.github/workflows/distribute.yml @@ -7,7 +7,7 @@ on: jobs: call-version-info-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-version-info.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-version-info.yml@v0.12.0 with: python_version: "3.10" @@ -21,7 +21,7 @@ jobs: with: fetch-depth: 0 - - uses: mamba-org/setup-micromamba@v1 + - uses: mamba-org/setup-micromamba@v2 with: environment-file: environment.yml @@ -31,7 +31,7 @@ jobs: python -m build - name: upload to PyPI.org - uses: pypa/gh-action-pypi-publish@v1.9.0 + uses: pypa/gh-action-pypi-publish@v1.12.3 with: user: __token__ password: ${{ secrets.TOOLS_PYPI_PAK }} diff --git a/.github/workflows/labeled-pr.yml b/.github/workflows/labeled-pr.yml index f89f3e3..f408f3b 100644 --- a/.github/workflows/labeled-pr.yml +++ b/.github/workflows/labeled-pr.yml @@ -12,4 +12,4 @@ on: jobs: call-labeled-pr-check-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.12.0 diff --git a/.github/workflows/release-template-comment.yml b/.github/workflows/release-template-comment.yml index 2e12712..a3a4f26 100644 --- a/.github/workflows/release-template-comment.yml +++ b/.github/workflows/release-template-comment.yml @@ -7,7 +7,7 @@ on: jobs: call-release-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.12.0 permissions: pull-requests: write secrets: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d4c54ea..7bcfd1d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,7 +7,7 @@ on: jobs: call-release-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.12.0 with: release_prefix: HyP3 SDK secrets: diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml index bfb8f53..9a48b7a 100644 --- a/.github/workflows/static-analysis.yml +++ b/.github/workflows/static-analysis.yml @@ -3,10 +3,10 @@ name: Static analysis on: push jobs: - call-flake8-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-flake8.yml@v0.11.2 - with: - local_package_names: hyp3_sdk - call-secrets-analysis-workflow: - uses: 
ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.11.2 + # Docs: https://github.com/ASFHyP3/actions + uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.12.0 + + call-ruff-workflow: + # Docs: https://github.com/ASFHyP3/actions + uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.12.0 diff --git a/.github/workflows/tag-version.yml b/.github/workflows/tag-version.yml index 3b94479..c8a5746 100644 --- a/.github/workflows/tag-version.yml +++ b/.github/workflows/tag-version.yml @@ -7,6 +7,8 @@ on: jobs: call-bump-version-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.11.2 + # For first-time setup, create a v0.0.0 tag as shown here: + # https://github.com/ASFHyP3/actions#reusable-bump-versionyml + uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.12.0 secrets: USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ebfa764..511a85b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,7 +12,7 @@ on: jobs: call-pytest-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-pytest.yml@v0.11.2 + uses: ASFHyP3/actions/.github/workflows/reusable-pytest.yml@v0.12.0 with: local_package_name: hyp3_sdk python_versions: >- diff --git a/.gitleaks.toml b/.gitleaks.toml deleted file mode 100644 index cddfcd7..0000000 --- a/.gitleaks.toml +++ /dev/null @@ -1,55 +0,0 @@ -title = "gitleaks config" -[[rules]] - description = "AWS Manager ID" - regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}''' - tags = ["key", "AWS"] -[[rules]] - description = "AWS Secret Key" - regex = '''(?i)aws(.{0,20})?(?-i)[0-9a-zA-Z\/+]{40}''' - tags = ["key", "AWS"] -[[rules]] - description = "AWS MWS key" - regex = '''amzn\.mws\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}''' - tags = ["key", "AWS", "MWS"] -[[rules]] - description = "Github" - regex = '''(?i)github(.{0,20})?(?-i)[0-9a-zA-Z]{35,40}''' - tags = ["key", "Github"] -[[rules]] - description = "Asymmetric Private Key" - regex = '''-----BEGIN ((EC|PGP|DSA|RSA|OPENSSH) )?PRIVATE KEY( BLOCK)?-----''' - tags = ["key", "AsymmetricPrivateKey"] -[[rules]] - description = "Generic Credential" - regex = '''(?i)(api_key|apikey|secret|password|pass|pw|key)(.{0,20})?[0-9a-zA-Z]{16,45}''' - tags = ["key", "API", "generic"] - [[rules.whitelist]] - regex = '''KeyChecking.no.*''' - description = "Ignore ssh settings for GitLab tools-bot" -[[rules]] - description = "Google API key" - regex = '''AIza[0-9A-Za-z\\-_]{35}''' - tags = ["key", "Google"] -[[rules]] - description = "Google (GCP) Service Account" - regex = '''"type": "service_account"''' - tags = ["key", "Google"] -[[rules]] - description = "Heroku API key" - regex = '''(?i)heroku(.{0,20})?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}''' - tags = ["key", "Heroku"] -[[rules]] - description = "MailChimp API key" - regex = '''(?i)(mailchimp|mc)(.{0,20})?[0-9a-f]{32}-us[0-9]{1,2}''' - tags = ["key", "Mailchimp"] -[[rules]] - description = "Mailgun API key" - regex = '''((?i)(mailgun|mg)(.{0,20})?)?key-[0-9a-z]{32}''' - tags = ["key", "Mailgun"] -[[rules]] - description = "Slack Webhook" - regex = '''https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}''' - tags = ["key", "slack"] -[whitelist] - description = "Whitelisted files" - files = ['''(^.*gitleaks.toml$|(.*?)(jpg|gif|doc|pdf|bin)$)'''] diff --git a/CHANGELOG.md b/CHANGELOG.md index 8c21841..7bd7c78 100644 ---
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [7.0.2] + +### Changed +* The [`static-analysis`](.github/workflows/static-analysis.yml) GitHub Actions workflow now uses `ruff` rather than `flake8` for linting. + ## [7.0.1] ### Removed diff --git a/docs/sdk_example.ipynb b/docs/sdk_example.ipynb index dacfdb0..c372754 100644 --- a/docs/sdk_example.ipynb +++ b/docs/sdk_example.ipynb @@ -55,6 +55,7 @@ "source": [ "# initial setup\n", "import asf_search as asf\n", + "\n", "import hyp3_sdk as sdk" ] }, @@ -157,34 +158,32 @@ { "cell_type": "code", "execution_count": null, - "outputs": [], - "source": [ - "from typing import Optional\n", - "\n", - "def get_nearest_neighbors(granule: str, max_neighbors: Optional[int] = None) -> asf.ASFSearchResults:\n", - " granule = asf.granule_search(granule)[-1]\n", - " stack = reversed([item for item in granule.stack() if item.properties['temporalBaseline'] < 0])\n", - " return asf.ASFSearchResults(stack)[:max_neighbors]" ], "metadata": { "collapsed": false, "pycharm": { "name": "#%%\n" } } + }, + "outputs": [], + "source": [ + "def get_nearest_neighbors(granule: str, max_neighbors: int | None = None) -> asf.ASFSearchResults:\n", + " granule = asf.granule_search(granule)[-1]\n", + " stack = reversed([item for item in granule.stack() if item.properties['temporalBaseline'] < 0])\n", + " return asf.ASFSearchResults(stack)[:max_neighbors]" ] }, { "cell_type": "markdown", - "source": [ - "Now, using the example granule list for our RTC jobs as the reference scenes, we can find their nearest and next-nearest neighbor granules, and submit them\n", - "as pairs for InSAR processing." ], "metadata": { "collapsed": false, "pycharm": { "name": "#%% md\n" } } + }, + "source": [ + "Now, using the example granule list for our RTC jobs as the reference scenes, we can find their nearest and next-nearest neighbor granules, and submit them\n", + "as pairs for InSAR processing."
+ ] }, { "cell_type": "code", @@ -194,6 +193,7 @@ "source": [ "from tqdm.auto import tqdm # For a nice progress bar: https://github.com/tqdm/tqdm#ipython-jupyter-integration\n", "\n", + "\n", "insar_jobs = sdk.Batch()\n", "for reference in tqdm(granules):\n", " neighbors = get_nearest_neighbors(reference, max_neighbors=2)\n", @@ -235,14 +235,17 @@ "source": [ "autorift_pairs = [\n", " # Sentinel-1 ESA granule IDs\n", - " ('S1A_IW_SLC__1SSH_20170221T204710_20170221T204737_015387_0193F6_AB07',\n", - " 'S1B_IW_SLC__1SSH_20170227T204628_20170227T204655_004491_007D11_6654'),\n", + " (\n", + " 'S1A_IW_SLC__1SSH_20170221T204710_20170221T204737_015387_0193F6_AB07',\n", + " 'S1B_IW_SLC__1SSH_20170227T204628_20170227T204655_004491_007D11_6654',\n", + " ),\n", " # Sentinel-2 ESA granule IDs\n", - " ('S2B_MSIL1C_20200612T150759_N0209_R025_T22WEB_20200612T184700',\n", - " 'S2A_MSIL1C_20200627T150921_N0209_R025_T22WEB_20200627T170912'),\n", + " (\n", + " 'S2B_MSIL1C_20200612T150759_N0209_R025_T22WEB_20200612T184700',\n", + " 'S2A_MSIL1C_20200627T150921_N0209_R025_T22WEB_20200627T170912',\n", + " ),\n", " # Landsat 8\n", - " ('LC08_L1TP_009011_20200703_20200913_02_T1',\n", - " 'LC08_L1TP_009011_20200820_20200905_02_T1'),\n", + " ('LC08_L1TP_009011_20200703_20200913_02_T1', 'LC08_L1TP_009011_20200820_20200905_02_T1'),\n", "]\n", "\n", "autorift_jobs = sdk.Batch()\n", diff --git a/docs/search_other_user_jobs.ipynb b/docs/search_other_user_jobs.ipynb index 2bac5ca..3802bfd 100644 --- a/docs/search_other_user_jobs.ipynb +++ b/docs/search_other_user_jobs.ipynb @@ -1,66 +1,76 @@ { - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "python", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8" - }, - "kernelspec": { - "name": "python", - "display_name": "Python (Pyodide)", - "language": "python" - } + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": "# Using the HyP3 SDK to search for jobs run by another user\n\nTo facilitate collaboration, HyP3 allows you to search for jobs run by other users.\n\nFollow [Using the HyP3 SDK for Python](https://nbviewer.jupyter.org/github/ASFHyP3/hyp3-sdk/blob/main/docs/sdk_example.ipynb) to install the `hyp3-sdk` package (version `2.1.1` or higher) and authenticate using your Earthdata credentials.\n\nSuppose you have run a number of RTC jobs with the name `rtc-example`. You can search for them using `find_jobs`:" }, - "nbformat_minor": 4, - "nbformat": 4, - "cells": [ - { - "cell_type": "markdown", - "source": "# Using the HyP3 SDK to search for jobs run by another user\n\nTo facilitate collaboration, HyP3 allows you to search for jobs run by other users.\n\nFollow [Using the HyP3 SDK for Python](https://nbviewer.jupyter.org/github/ASFHyP3/hyp3-sdk/blob/main/docs/sdk_example.ipynb) to install the `hyp3-sdk` package (version `2.1.1` or higher) and authenticate using your Earthdata credentials.\n\nSuppose you have run a number of RTC jobs with the name `rtc-example`. You can search for them using `find_jobs`:", - "metadata": {} - }, - { - "cell_type": "code", - "source": "from hyp3_sdk import HyP3\nhyp3 = HyP3()\nmy_rtc_jobs = hyp3.find_jobs(name='rtc-example')", - "metadata": {}, - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "source": "Suppose that you are working with another user who has also run a number of RTC jobs with the same name. 
You can search for those jobs by providing the `user_id` parameter:", - "metadata": {} - }, - { - "cell_type": "code", - "source": "other_user_rtc_jobs = hyp3.find_jobs(name='rtc-example', user_id='other_user')", - "metadata": {}, - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "source": "Or, suppose that the other user has run a number of InSAR jobs with the name `insar-example`:", - "metadata": {} - }, - { - "cell_type": "code", - "source": "other_user_insar_jobs = hyp3.find_jobs(name='insar-example', user_id='other_user')", - "metadata": {}, - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "source": "You can provide the `user_id` parameter with any combination of other parameters supported by `find_jobs`. This allows you to search for jobs run by other users just as you would search for your own jobs.\n\nIf the `user_id` parameter is not provided, jobs are returned for the currently authenticated user.", - "metadata": {} - } - ] + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from hyp3_sdk import HyP3\n", + "\n", + "\n", + "hyp3 = HyP3()\n", + "my_rtc_jobs = hyp3.find_jobs(name='rtc-example')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": "Suppose that you are working with another user who has also run a number of RTC jobs with the same name. You can search for those jobs by providing the `user_id` parameter:" + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "other_user_rtc_jobs = hyp3.find_jobs(name='rtc-example', user_id='other_user')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": "Or, suppose that the other user has run a number of InSAR jobs with the name `insar-example`:" + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "other_user_insar_jobs = hyp3.find_jobs(name='insar-example', user_id='other_user')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": "You can provide the `user_id` parameter with any combination of other parameters supported by `find_jobs`. This allows you to search for jobs run by other users just as you would search for your own jobs.\n\nIf the `user_id` parameter is not provided, jobs are returned for the currently authenticated user." 
+ } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (Pyodide)", + "language": "python", + "name": "python" + }, + "language_info": { + "codemirror_mode": { + "name": "python", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8" + } + }, + "nbformat": 4, + "nbformat_minor": 4 } diff --git a/environment.yml b/environment.yml index 1d1124b..4b08a7f 100644 --- a/environment.yml +++ b/environment.yml @@ -6,11 +6,8 @@ dependencies: - python>=3.10 - pip # For packaging, and testing - - build - - flake8 - - flake8-import-order - - flake8-blind-except - - flake8-builtins + - python-build + - ruff - jupyter - setuptools>=61 - setuptools_scm>=6.2 diff --git a/pyproject.toml b/pyproject.toml index c086432..57971ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,3 +48,28 @@ zip-safe = false where = ["src"] [tool.setuptools_scm] + +[tool.ruff] +line-length = 120 +src = ["src", "tests"] + +[tool.ruff.format] +indent-style = "space" +quote-style = "single" + +[tool.ruff.lint] +extend-select = [ + "I", # isort: https://docs.astral.sh/ruff/rules/#isort-i + "UP", # pyupgrade: https://docs.astral.sh/ruff/rules/#pyupgrade-up + # TODO: Uncomment the following extensions and address the warnings: + # "D", # pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d + # "ANN", # annotations: https://docs.astral.sh/ruff/rules/#flake8-annotations-ann + # "PTH", # use-pathlib-pth: https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth +] + +[tool.ruff.lint.pydocstyle] +convention = "google" + +[tool.ruff.lint.isort] +case-sensitive = true +lines-after-imports = 2 diff --git a/src/hyp3_sdk/__init__.py b/src/hyp3_sdk/__init__.py index b92f816..3ff5fb2 100644 --- a/src/hyp3_sdk/__init__.py +++ b/src/hyp3_sdk/__init__.py @@ -3,7 +3,7 @@ from importlib.metadata import version from . import util -from .hyp3 import HyP3, PROD_API, TEST_API +from .hyp3 import PROD_API, TEST_API, HyP3 from .jobs import Batch, Job diff --git a/src/hyp3_sdk/hyp3.py b/src/hyp3_sdk/hyp3.py index 4f653cc..fc15912 100644 --- a/src/hyp3_sdk/hyp3.py +++ b/src/hyp3_sdk/hyp3.py @@ -4,7 +4,7 @@ from datetime import datetime, timezone from functools import singledispatchmethod from getpass import getpass -from typing import List, Literal, Optional, Union +from typing import Literal from urllib.parse import urljoin from warnings import warn @@ -13,6 +13,7 @@ from hyp3_sdk.exceptions import HyP3Error, _raise_for_hyp3_status from hyp3_sdk.jobs import Batch, Job + PROD_API = 'https://hyp3-api.asf.alaska.edu' TEST_API = 'https://hyp3-test-api.asf.alaska.edu' @@ -24,8 +25,13 @@ class HyP3: https://hyp3-docs.asf.alaska.edu/#public-visibility-of-jobs """ - def __init__(self, api_url: str = PROD_API, username: Optional[str] = None, password: Optional[str] = None, - prompt: bool = False): + def __init__( + self, + api_url: str = PROD_API, + username: str | None = None, + password: str | None = None, + prompt: bool = False, + ): """If username and password are not provided, attempts to use credentials from a `.netrc` file. 
Args: @@ -47,13 +53,15 @@ def __init__(self, api_url: str = PROD_API, username: Optional[str] = None, pass self.session = hyp3_sdk.util.get_authenticated_session(username, password) self.session.headers.update({'User-Agent': f'{hyp3_sdk.__name__}/{hyp3_sdk.__version__}'}) - def find_jobs(self, - start: Optional[datetime] = None, - end: Optional[datetime] = None, - status_code: Optional[str] = None, - name: Optional[str] = None, - job_type: Optional[str] = None, - user_id: Optional[str] = None) -> Batch: + def find_jobs( + self, + start: datetime | None = None, + end: datetime | None = None, + status_code: str | None = None, + name: str | None = None, + job_type: str | None = None, + user_id: str | None = None, + ) -> Batch: """Gets a Batch of jobs from HyP3 matching the provided search criteria Args: @@ -105,8 +113,7 @@ def get_job_by_id(self, job_id: str) -> Job: return Job.from_dict(response.json()) @singledispatchmethod - def watch(self, job_or_batch: Union[Batch, Job], timeout: int = 10800, - interval: Union[int, float] = 60) -> Union[Batch, Job]: + def watch(self, job_or_batch: Batch | Job, timeout: int = 10800, interval: int | float = 60) -> Batch | Job: """Watch jobs until they complete Args: @@ -120,7 +127,7 @@ def watch(self, job_or_batch: Union[Batch, Job], timeout: int = 10800, raise NotImplementedError(f'Cannot watch {type(job_or_batch)} type object') @watch.register - def _watch_batch(self, batch: Batch, timeout: int = 10800, interval: Union[int, float] = 60) -> Batch: + def _watch_batch(self, batch: Batch, timeout: int = 10800, interval: int | float = 60) -> Batch: tqdm = hyp3_sdk.util.get_tqdm_progress_bar() iterations_until_timeout = math.ceil(timeout / interval) bar_format = '{l_bar}{bar}| {n_fmt}/{total_fmt} [{postfix[0]}]' @@ -142,7 +149,7 @@ def _watch_batch(self, batch: Batch, timeout: int = 10800, interval: Union[int, raise HyP3Error(f'Timeout occurred while waiting for {batch}') @watch.register - def _watch_job(self, job: Job, timeout: int = 10800, interval: Union[int, float] = 60) -> Job: + def _watch_job(self, job: Job, timeout: int = 10800, interval: int | float = 60) -> Job: tqdm = hyp3_sdk.util.get_tqdm_progress_bar() iterations_until_timeout = math.ceil(timeout / interval) bar_format = '{n_fmt}/{total_fmt} [{postfix[0]}]' @@ -158,7 +165,7 @@ def _watch_job(self, job: Job, timeout: int = 10800, interval: Union[int, float] raise HyP3Error(f'Timeout occurred while waiting for {job}') @singledispatchmethod - def refresh(self, job_or_batch: Union[Batch, Job]) -> Union[Batch, Job]: + def refresh(self, job_or_batch: Batch | Job) -> Batch | Job: """Refresh each jobs' information Args: @@ -180,7 +187,7 @@ def _refresh_batch(self, batch: Batch): def _refresh_job(self, job: Job): return self.get_job_by_id(job.job_id) - def submit_prepared_jobs(self, prepared_jobs: Union[dict, List[dict]]) -> Batch: + def submit_prepared_jobs(self, prepared_jobs: dict | list[dict]) -> Batch: """Submit a prepared job dictionary, or list of prepared job dictionaries Args: @@ -202,7 +209,7 @@ def submit_prepared_jobs(self, prepared_jobs: Union[dict, List[dict]]) -> Batch: batch += Job.from_dict(job) return batch - def submit_autorift_job(self, granule1: str, granule2: str, name: Optional[str] = None) -> Batch: + def submit_autorift_job(self, granule1: str, granule2: str, name: str | None = None) -> Batch: """Submit an autoRIFT job Args: @@ -217,7 +224,7 @@ def submit_autorift_job(self, granule1: str, granule2: str, name: Optional[str] return self.submit_prepared_jobs(prepared_jobs=job_dict) 
@classmethod - def prepare_autorift_job(cls, granule1: str, granule2: str, name: Optional[str] = None) -> dict: + def prepare_autorift_job(cls, granule1: str, granule2: str, name: str | None = None) -> dict: """Submit an autoRIFT job Args: @@ -236,19 +243,21 @@ def prepare_autorift_job(cls, granule1: str, granule2: str, name: Optional[str] job_dict['name'] = name return job_dict - def submit_rtc_job(self, - granule: str, - name: Optional[str] = None, - dem_matching: bool = False, - include_dem: bool = False, - include_inc_map: bool = False, - include_rgb: bool = False, - include_scattering_area: bool = False, - radiometry: Literal['sigma0', 'gamma0'] = 'gamma0', - resolution: Literal[10, 20, 30] = 30, - scale: Literal['amplitude', 'decibel', 'power'] = 'power', - speckle_filter: bool = False, - dem_name: Literal['copernicus'] = 'copernicus') -> Batch: + def submit_rtc_job( + self, + granule: str, + name: str | None = None, + dem_matching: bool = False, + include_dem: bool = False, + include_inc_map: bool = False, + include_rgb: bool = False, + include_scattering_area: bool = False, + radiometry: Literal['sigma0', 'gamma0'] = 'gamma0', + resolution: Literal[10, 20, 30] = 30, + scale: Literal['amplitude', 'decibel', 'power'] = 'power', + speckle_filter: bool = False, + dem_name: Literal['copernicus'] = 'copernicus', + ) -> Batch: """Submit an RTC job Args: @@ -277,19 +286,21 @@ def submit_rtc_job(self, return self.submit_prepared_jobs(prepared_jobs=job_dict) @classmethod - def prepare_rtc_job(cls, - granule: str, - name: Optional[str] = None, - dem_matching: bool = False, - include_dem: bool = False, - include_inc_map: bool = False, - include_rgb: bool = False, - include_scattering_area: bool = False, - radiometry: Literal['sigma0', 'gamma0'] = 'gamma0', - resolution: Literal[10, 20, 30] = 30, - scale: Literal['amplitude', 'decibel', 'power'] = 'power', - speckle_filter: bool = False, - dem_name: Literal['copernicus'] = 'copernicus') -> dict: + def prepare_rtc_job( + cls, + granule: str, + name: str | None = None, + dem_matching: bool = False, + include_dem: bool = False, + include_inc_map: bool = False, + include_rgb: bool = False, + include_scattering_area: bool = False, + radiometry: Literal['sigma0', 'gamma0'] = 'gamma0', + resolution: Literal[10, 20, 30] = 30, + scale: Literal['amplitude', 'decibel', 'power'] = 'power', + speckle_filter: bool = False, + dem_name: Literal['copernicus'] = 'copernicus', + ) -> dict: """Submit an RTC job Args: @@ -325,19 +336,21 @@ def prepare_rtc_job(cls, job_dict['name'] = name return job_dict - def submit_insar_job(self, - granule1: str, - granule2: str, - name: Optional[str] = None, - include_look_vectors: bool = False, - include_los_displacement: bool = False, - include_inc_map: bool = False, - looks: Literal['20x4', '10x2'] = '20x4', - include_dem: bool = False, - include_wrapped_phase: bool = False, - apply_water_mask: bool = False, - include_displacement_maps: bool = False, - phase_filter_parameter: float = 0.6) -> Batch: + def submit_insar_job( + self, + granule1: str, + granule2: str, + name: str | None = None, + include_look_vectors: bool = False, + include_los_displacement: bool = False, + include_inc_map: bool = False, + looks: Literal['20x4', '10x2'] = '20x4', + include_dem: bool = False, + include_wrapped_phase: bool = False, + apply_water_mask: bool = False, + include_displacement_maps: bool = False, + phase_filter_parameter: float = 0.6, + ) -> Batch: """Submit an InSAR job Args: @@ -369,19 +382,21 @@ def submit_insar_job(self, return 
self.submit_prepared_jobs(prepared_jobs=job_dict) @classmethod - def prepare_insar_job(cls, - granule1: str, - granule2: str, - name: Optional[str] = None, - include_look_vectors: bool = False, - include_los_displacement: bool = False, - include_inc_map: bool = False, - looks: Literal['20x4', '10x2'] = '20x4', - include_dem: bool = False, - include_wrapped_phase: bool = False, - apply_water_mask: bool = False, - include_displacement_maps: bool = False, - phase_filter_parameter: float = 0.6) -> dict: + def prepare_insar_job( + cls, + granule1: str, + granule2: str, + name: str | None = None, + include_look_vectors: bool = False, + include_los_displacement: bool = False, + include_inc_map: bool = False, + looks: Literal['20x4', '10x2'] = '20x4', + include_dem: bool = False, + include_wrapped_phase: bool = False, + apply_water_mask: bool = False, + include_displacement_maps: bool = False, + phase_filter_parameter: float = 0.6, + ) -> dict: """Submit an InSAR job Args: @@ -408,8 +423,11 @@ def prepare_insar_job(cls, A dictionary containing the prepared InSAR job """ if include_los_displacement: - warnings.warn('The include_los_displacement parameter has been deprecated in favor of ' - 'include_displacement_maps, and will be removed in a future release.', FutureWarning) + warnings.warn( + 'The include_los_displacement parameter has been deprecated in favor of ' + 'include_displacement_maps, and will be removed in a future release.', + FutureWarning, + ) job_parameters = locals().copy() for key in ['cls', 'granule1', 'granule2', 'name']: @@ -423,12 +441,14 @@ def prepare_insar_job(cls, job_dict['name'] = name return job_dict - def submit_insar_isce_burst_job(self, - granule1: str, - granule2: str, - name: Optional[str] = None, - apply_water_mask: bool = False, - looks: Literal['20x4', '10x2', '5x1'] = '20x4') -> Batch: + def submit_insar_isce_burst_job( + self, + granule1: str, + granule2: str, + name: str | None = None, + apply_water_mask: bool = False, + looks: Literal['20x4', '10x2', '5x1'] = '20x4', + ) -> Batch: """Submit an InSAR ISCE burst job. Args: @@ -448,12 +468,14 @@ def submit_insar_isce_burst_job(self, return self.submit_prepared_jobs(prepared_jobs=job_dict) @classmethod - def prepare_insar_isce_burst_job(cls, - granule1: str, - granule2: str, - name: Optional[str] = None, - apply_water_mask: bool = False, - looks: Literal['20x4', '10x2', '5x1'] = '20x4') -> dict: + def prepare_insar_isce_burst_job( + cls, + granule1: str, + granule2: str, + name: str | None = None, + apply_water_mask: bool = False, + looks: Literal['20x4', '10x2', '5x1'] = '20x4', + ) -> dict: """Prepare an InSAR ISCE burst job. 
Args: @@ -480,36 +502,37 @@ def prepare_insar_isce_burst_job(cls, return job_dict def my_info(self) -> dict: - """ - Returns: - Your user information + """Returns: + Your user information """ response = self.session.get(urljoin(self.url, '/user')) _raise_for_hyp3_status(response) return response.json() - def check_credits(self) -> Union[float, int, None]: - """ - Returns: - Your remaining processing credits, or None if you have no processing limit + def check_credits(self) -> float | int | None: + """Returns: + Your remaining processing credits, or None if you have no processing limit """ info = self.my_info() return info['remaining_credits'] - def check_quota(self) -> Union[float, int, None]: + def check_quota(self) -> float | int | None: """Deprecated method for checking your remaining processing credits; replaced by `HyP3.check_credits` Returns: Your remaining processing credits, or None if you have no processing limit """ - warn('This method is deprecated and will be removed in a future release.\n' - 'Please use `HyP3.check_credits` instead.', DeprecationWarning, stacklevel=2) + warn( + 'This method is deprecated and will be removed in a future release.\n' + 'Please use `HyP3.check_credits` instead.', + DeprecationWarning, + stacklevel=2, + ) return self.check_credits() def costs(self) -> dict: - """ - Returns: - Table of job costs + """Returns: + Table of job costs """ response = self.session.get(urljoin(self.url, '/costs')) _raise_for_hyp3_status(response) diff --git a/src/hyp3_sdk/jobs.py b/src/hyp3_sdk/jobs.py index 3e11548..156768f 100644 --- a/src/hyp3_sdk/jobs.py +++ b/src/hyp3_sdk/jobs.py @@ -1,7 +1,7 @@ from collections import Counter from datetime import datetime from pathlib import Path -from typing import List, Optional, Union +from typing import Union from dateutil import tz from dateutil.parser import parse as parse_date @@ -17,22 +17,22 @@ class Job: _attributes_for_resubmit = {'name', 'job_parameters', 'job_type'} def __init__( - self, - job_type: str, - job_id: str, - request_time: datetime, - status_code: str, - user_id: str, - name: Optional[str] = None, - job_parameters: Optional[dict] = None, - files: Optional[List] = None, - logs: Optional[List] = None, - browse_images: Optional[List] = None, - thumbnail_images: Optional[List] = None, - expiration_time: Optional[datetime] = None, - processing_times: Optional[List[float]] = None, - credit_cost: Optional[float] = None, - priority: Optional[int] = None, + self, + job_type: str, + job_id: str, + request_time: datetime, + status_code: str, + user_id: str, + name: str | None = None, + job_parameters: dict | None = None, + files: list | None = None, + logs: list | None = None, + browse_images: list | None = None, + thumbnail_images: list | None = None, + expiration_time: datetime | None = None, + processing_times: list[float] | None = None, + credit_cost: float | None = None, + priority: int | None = None, ): self.job_id = job_id self.job_type = job_type @@ -115,9 +115,8 @@ def running(self) -> bool: def expired(self) -> bool: return self.expiration_time is not None and datetime.now(tz.UTC) >= self.expiration_time - def download_files(self, location: Union[Path, str] = '.', create: bool = True) -> List[Path]: - """ - Args: + def download_files(self, location: Path | str = '.', create: bool = True) -> list[Path]: + """Args: location: Directory location to put files into create: Create `location` if it does not point to an existing directory @@ -128,8 +127,10 @@ def download_files(self, location: Union[Path, str] = '.', 
create: bool = True) if not self.succeeded(): raise HyP3SDKError(f'Only succeeded jobs can be downloaded; job is {self.status_code}.') if self.expired(): - raise HyP3SDKError(f'Expired jobs cannot be downloaded; ' - f'job expired {self.expiration_time.isoformat(timespec="seconds")}.') + raise HyP3SDKError( + f'Expired jobs cannot be downloaded; ' + f'job expired {self.expiration_time.isoformat(timespec="seconds")}.' + ) if create: location.mkdir(parents=True, exist_ok=True) @@ -148,7 +149,7 @@ def download_files(self, location: Union[Path, str] = '.', create: bool = True) class Batch: - def __init__(self, jobs: Optional[List[Job]] = None): + def __init__(self, jobs: list[Job] | None = None): if jobs is None: jobs = [] self.jobs = jobs @@ -196,41 +197,38 @@ def __setitem__(self, index: int, job: Job): return self def __repr__(self): - reprs = ", ".join([job.__repr__() for job in self.jobs]) + reprs = ', '.join([job.__repr__() for job in self.jobs]) return f'Batch([{reprs}])' def __str__(self): count = self._count_statuses() - return f'{len(self)} HyP3 Jobs: ' \ - f'{count["SUCCEEDED"]} succeeded, ' \ - f'{count["FAILED"]} failed, ' \ - f'{count["RUNNING"]} running, ' \ - f'{count["PENDING"]} pending.' + return ( + f'{len(self)} HyP3 Jobs: ' + f'{count["SUCCEEDED"]} succeeded, ' + f'{count["FAILED"]} failed, ' + f'{count["RUNNING"]} running, ' + f'{count["PENDING"]} pending.' + ) def _count_statuses(self): return Counter([job.status_code for job in self.jobs]) def complete(self) -> bool: - """ - Returns: True if all jobs are complete, otherwise returns False - """ + """Returns: True if all jobs are complete, otherwise returns False""" for job in self.jobs: if not job.complete(): return False return True def succeeded(self) -> bool: - """ - Returns: True if all jobs have succeeded, otherwise returns False - """ + """Returns: True if all jobs have succeeded, otherwise returns False""" for job in self.jobs: if not job.succeeded(): return False return True - def download_files(self, location: Union[Path, str] = '.', create: bool = True) -> List[Path]: - """ - Args: + def download_files(self, location: Path | str = '.', create: bool = True) -> list[Path]: + """Args: location: Directory location to put files into create: Create `location` if it does not point to an existing directory diff --git a/src/hyp3_sdk/util.py b/src/hyp3_sdk/util.py index 2d43833..93b7d19 100644 --- a/src/hyp3_sdk/util.py +++ b/src/hyp3_sdk/util.py @@ -1,8 +1,9 @@ """Extra utilities for working with HyP3""" import urllib.parse +from collections.abc import Generator, Sequence from pathlib import Path -from typing import Any, Generator, Sequence, Union +from typing import Any from zipfile import ZipFile import requests @@ -11,13 +12,16 @@ from hyp3_sdk.exceptions import AuthenticationError -AUTH_URL = 'https://urs.earthdata.nasa.gov/oauth/authorize?response_type=code&client_id=BO_n7nTIlMljdvU6kRRB3g' \ - '&redirect_uri=https://auth.asf.alaska.edu/login&app_type=401' + +AUTH_URL = ( + 'https://urs.earthdata.nasa.gov/oauth/authorize?response_type=code&client_id=BO_n7nTIlMljdvU6kRRB3g' + '&redirect_uri=https://auth.asf.alaska.edu/login&app_type=401' +) PROFILE_URL = 'https://urs.earthdata.nasa.gov/profile' -def extract_zipped_product(zip_file: Union[str, Path], delete: bool = True) -> Path: +def extract_zipped_product(zip_file: str | Path, delete: bool = True) -> Path: """Extract a zipped HyP3 product Extract a zipped HyP3 product to the same directory as the zipped HyP3 product, optionally @@ -51,7 +55,7 @@ def chunk(itr: 
Sequence[Any], n: int = 200) -> Generator[Sequence[Any], None, No raise ValueError(f'n must be a positive integer: {n}') for i in range(0, len(itr), n): - yield itr[i:i + n] + yield itr[i : i + n] def get_tqdm_progress_bar(): @@ -105,7 +109,7 @@ def get_authenticated_session(username: str, password: str) -> requests.Session: return s -def download_file(url: str, filepath: Union[Path, str], chunk_size=None, retries=2, backoff_factor=1) -> Path: +def download_file(url: str, filepath: Path | str, chunk_size=None, retries=2, backoff_factor=1) -> Path: """Download a file Args: url: URL of the file to download @@ -130,8 +134,9 @@ def download_file(url: str, filepath: Union[Path, str], chunk_size=None, retries with session.get(url, stream=stream) as s: s.raise_for_status() tqdm = get_tqdm_progress_bar() - with tqdm.wrapattr(open(filepath, "wb"), 'write', miniters=1, desc=filepath.name, - total=int(s.headers.get('content-length', 0))) as f: + with tqdm.wrapattr( + open(filepath, 'wb'), 'write', miniters=1, desc=filepath.name, total=int(s.headers.get('content-length', 0)) + ) as f: for chunk in s.iter_content(chunk_size=chunk_size): if chunk: f.write(chunk) diff --git a/tests/conftest.py b/tests/conftest.py index 3e699d9..24240ce 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,24 +19,25 @@ def mock_get_authenticated_session(username, password): def default_hyp3(): with patch('hyp3_sdk.util.get_authenticated_session', mock_get_authenticated_session): return HyP3() + return default_hyp3 @pytest.fixture(autouse=True) def get_mock_job(): def default_job( - job_type='JOB_TYPE', - request_time=datetime.now(), - status_code='RUNNING', - user_id='user', - name='name', - job_parameters=None, - files=None, - browse_images=None, - thumbnail_images=None, - expiration_time=None, - credit_cost=None, - priority=None, + job_type='JOB_TYPE', + request_time=datetime.now(), + status_code='RUNNING', + user_id='user', + name='name', + job_parameters=None, + files=None, + browse_images=None, + thumbnail_images=None, + expiration_time=None, + credit_cost=None, + priority=None, ): if job_parameters is None: job_parameters = {'param1': 'value1'} @@ -62,6 +63,7 @@ def default_job( del job_dict[key] return Job.from_dict(job_dict) + return default_job diff --git a/tests/test_hyp3.py b/tests/test_hyp3.py index 25086f8..86688a0 100644 --- a/tests/test_hyp3.py +++ b/tests/test_hyp3.py @@ -28,10 +28,13 @@ def test_find_jobs(get_mock_hyp3, get_mock_job): 'jobs': [ get_mock_job(name='job1').to_dict(), get_mock_job(name='job2', request_time=datetime.now() - timedelta(minutes=15)).to_dict(), - get_mock_job(name='job3', status_code='SUCCEEDED', - files=[{'url': 'https://foo.com/file.zip', 'size': 1000, 'filename': 'file.zip'}], - browse_images=['https://foo.com/browse.png'], - thumbnail_images=['https://foo.com/thumbnail.png']).to_dict() + get_mock_job( + name='job3', + status_code='SUCCEEDED', + files=[{'url': 'https://foo.com/file.zip', 'size': 1000, 'filename': 'file.zip'}], + browse_images=['https://foo.com/browse.png'], + thumbnail_images=['https://foo.com/thumbnail.png'], + ).to_dict(), ] } @@ -56,16 +59,12 @@ def test_find_jobs_paging(get_mock_hyp3, get_mock_job): get_mock_job(name='job1').to_dict(), get_mock_job(name='job2').to_dict(), ], - 'next': urljoin(api.url, '/jobs?next=foobar') - } - api_response_mock_2 = { - 'jobs': [ - get_mock_job(name='job3').to_dict() - ] + 'next': urljoin(api.url, '/jobs?next=foobar'), } + api_response_mock_2 = {'jobs': [get_mock_job(name='job3').to_dict()]} 
responses.add(responses.GET, urljoin(api.url, '/jobs'), json=api_response_mock_1, match_querystring=True) - responses.add(responses.GET, urljoin(api.url, '/jobs?next=foobar'), json=api_response_mock_2) + responses.add(responses.GET, urljoin(api.url, '/jobs?next=foobar'), json=api_response_mock_2) batch = api.find_jobs() assert len(batch) == 3 @@ -77,11 +76,14 @@ def test_find_jobs_paging(get_mock_hyp3, get_mock_job): def test_find_jobs_user_id(get_mock_hyp3, get_mock_job): api = get_mock_hyp3() - responses.add(responses.GET, urljoin(api.url, '/jobs?user_id=foo'), - json={'jobs': []}, match_querystring=True) + responses.add(responses.GET, urljoin(api.url, '/jobs?user_id=foo'), json={'jobs': []}, match_querystring=True) - responses.add(responses.GET, urljoin(api.url, '/jobs?user_id=bar'), - json={'jobs': [get_mock_job(name='job1').to_dict()]}, match_querystring=True) + responses.add( + responses.GET, + urljoin(api.url, '/jobs?user_id=bar'), + json={'jobs': [get_mock_job(name='job1').to_dict()]}, + match_querystring=True, + ) batch = api.find_jobs(user_id='foo') assert len(batch) == 0 @@ -94,8 +96,12 @@ def test_find_jobs_user_id(get_mock_hyp3, get_mock_job): def test_find_jobs_start(get_mock_hyp3): api = get_mock_hyp3() - responses.add(responses.GET, urljoin(api.url, '/jobs?start=2021-01-01T00%3A00%3A00%2B00%3A00'), - json={'jobs': []}, match_querystring=True) + responses.add( + responses.GET, + urljoin(api.url, '/jobs?start=2021-01-01T00%3A00%3A00%2B00%3A00'), + json={'jobs': []}, + match_querystring=True, + ) batch = api.find_jobs(start=datetime(2021, 1, 1)) assert len(batch) == 0 @@ -108,8 +114,12 @@ def test_find_jobs_start(get_mock_hyp3): def test_find_jobs_end(get_mock_hyp3): api = get_mock_hyp3() - responses.add(responses.GET, urljoin(api.url, '/jobs?end=2021-01-02T00%3A00%3A00%2B00%3A00'), - json={'jobs': []}, match_querystring=True) + responses.add( + responses.GET, + urljoin(api.url, '/jobs?end=2021-01-02T00%3A00%3A00%2B00%3A00'), + json={'jobs': []}, + match_querystring=True, + ) batch = api.find_jobs(end=datetime(2021, 1, 2)) assert len(batch) == 0 @@ -122,13 +132,15 @@ def test_find_jobs_end(get_mock_hyp3): def test_find_jobs_status_code(get_mock_hyp3): api = get_mock_hyp3() - responses.add(responses.GET, urljoin(api.url, '/jobs?status_code=RUNNING'), - json={'jobs': []}, match_querystring=True) + responses.add( + responses.GET, urljoin(api.url, '/jobs?status_code=RUNNING'), json={'jobs': []}, match_querystring=True + ) batch = api.find_jobs(status_code='RUNNING') assert len(batch) == 0 - responses.add(responses.GET, urljoin(api.url, '/jobs?status_code=FAILED'), - json={'jobs': []}, match_querystring=True) + responses.add( + responses.GET, urljoin(api.url, '/jobs?status_code=FAILED'), json={'jobs': []}, match_querystring=True + ) batch = api.find_jobs(status_code='FAILED') assert len(batch) == 0 @@ -149,10 +161,8 @@ def test_watch(get_mock_hyp3, get_mock_job): complete_job.status_code = 'SUCCEEDED' api = get_mock_hyp3() for ii in range(3): - responses.add(responses.GET, urljoin(api.url, f'/jobs/{incomplete_job.job_id}'), - json=incomplete_job.to_dict()) - responses.add(responses.GET, urljoin(api.url, f'/jobs/{incomplete_job.job_id}'), - json=complete_job.to_dict()) + responses.add(responses.GET, urljoin(api.url, f'/jobs/{incomplete_job.job_id}'), json=incomplete_job.to_dict()) + responses.add(responses.GET, urljoin(api.url, f'/jobs/{incomplete_job.job_id}'), json=complete_job.to_dict()) response = api.watch(incomplete_job, interval=0.05) assert response == complete_job 
responses.assert_call_count(urljoin(api.url, f'/jobs/{incomplete_job.job_id}'), 4) @@ -186,8 +196,7 @@ def test_submit_prepared_jobs(get_mock_hyp3, get_mock_job): responses.add(responses.POST, urljoin(api.url, '/jobs'), json=api_response) - batch = api.submit_prepared_jobs( - [rtc_job.to_dict(for_resubmit=True), insar_job.to_dict(for_resubmit=True)]) + batch = api.submit_prepared_jobs([rtc_job.to_dict(for_resubmit=True), insar_job.to_dict(for_resubmit=True)]) assert batch.jobs == [rtc_job, insar_job] @@ -196,9 +205,9 @@ def test_prepare_autorift_job(): 'job_type': 'AUTORIFT', 'job_parameters': { 'granules': ['my_granule1', 'my_granule2'], - } + }, } - assert HyP3.prepare_autorift_job(granule1='my_granule1', granule2='my_granule2', name='my_name') == { + assert HyP3.prepare_autorift_job(granule1='my_granule1', granule2='my_granule2', name='my_name') == { 'job_type': 'AUTORIFT', 'name': 'my_name', 'job_parameters': { @@ -222,7 +231,7 @@ def test_prepare_rtc_job(): 'scale': 'power', 'speckle_filter': False, 'dem_name': 'copernicus', - } + }, } assert HyP3.prepare_rtc_job(granule='my_granule', name='my_name') == { 'job_type': 'RTC_GAMMA', @@ -257,12 +266,22 @@ def test_prepare_insar_job(): 'apply_water_mask': False, 'include_displacement_maps': False, 'phase_filter_parameter': 0.6, - } + }, } - assert HyP3.prepare_insar_job(granule1='my_granule1', granule2='my_granule2', name='my_name', looks='10x2', - include_los_displacement=True, include_look_vectors=True, include_inc_map=True, - include_dem=True, include_wrapped_phase=True, apply_water_mask=True, - include_displacement_maps=True, phase_filter_parameter=0.4) == { + assert HyP3.prepare_insar_job( + granule1='my_granule1', + granule2='my_granule2', + name='my_name', + looks='10x2', + include_los_displacement=True, + include_look_vectors=True, + include_inc_map=True, + include_dem=True, + include_wrapped_phase=True, + apply_water_mask=True, + include_displacement_maps=True, + phase_filter_parameter=0.4, + ) == { 'job_type': 'INSAR_GAMMA', 'name': 'my_name', 'job_parameters': { @@ -287,17 +306,18 @@ def test_prepare_insar_isce_burst_job(): 'granules': ['my_granule1', 'my_granule2'], 'apply_water_mask': False, 'looks': '20x4', - } + }, } - assert HyP3.prepare_insar_isce_burst_job(granule1='my_granule1', granule2='my_granule2', name='my_name', - apply_water_mask=True, looks='10x2') == { + assert HyP3.prepare_insar_isce_burst_job( + granule1='my_granule1', granule2='my_granule2', name='my_name', apply_water_mask=True, looks='10x2' + ) == { 'job_type': 'INSAR_ISCE_BURST', 'name': 'my_name', 'job_parameters': { 'granules': ['my_granule1', 'my_granule2'], 'apply_water_mask': True, 'looks': '10x2', - } + }, } @@ -316,11 +336,7 @@ def test_deprecated_warning(): @responses.activate def test_submit_autorift_job(get_mock_hyp3, get_mock_job): job = get_mock_job('AUTORIFT', job_parameters={'granules': ['g1', 'g2']}) - api_response = { - 'jobs': [ - job.to_dict() - ] - } + api_response = {'jobs': [job.to_dict()]} api = get_mock_hyp3() responses.add(responses.POST, urljoin(api.url, '/jobs'), json=api_response) batch = api.submit_autorift_job('g1', 'g2') @@ -330,11 +346,7 @@ def test_submit_autorift_job(get_mock_hyp3, get_mock_job): @responses.activate def test_submit_rtc_job(get_mock_hyp3, get_mock_job): job = get_mock_job('RTC_GAMMA', job_parameters={'granules': ['g1']}) - api_response = { - 'jobs': [ - job.to_dict() - ] - } + api_response = {'jobs': [job.to_dict()]} api = get_mock_hyp3() responses.add(responses.POST, urljoin(api.url, '/jobs'), 
json=api_response) batch = api.submit_rtc_job('g1') @@ -344,11 +356,7 @@ def test_submit_rtc_job(get_mock_hyp3, get_mock_job): @responses.activate def test_submit_insar_job(get_mock_hyp3, get_mock_job): job = get_mock_job('INSAR_GAMMA', job_parameters={'granules': ['g1', 'g2']}) - api_response = { - 'jobs': [ - job.to_dict() - ] - } + api_response = {'jobs': [job.to_dict()]} api = get_mock_hyp3() responses.add(responses.POST, urljoin(api.url, '/jobs'), json=api_response) batch = api.submit_insar_job('g1', 'g2') @@ -358,11 +366,7 @@ def test_submit_insar_job(get_mock_hyp3, get_mock_job): @responses.activate def test_submit_insar_isce_burst_job(get_mock_hyp3, get_mock_job): job = get_mock_job('INSAR_ISCE_BURST', job_parameters={'granules': ['g1', 'g2']}) - api_response = { - 'jobs': [ - job.to_dict() - ] - } + api_response = {'jobs': [job.to_dict()]} api = get_mock_hyp3() responses.add(responses.POST, urljoin(api.url, '/jobs'), json=api_response) batch = api.submit_insar_isce_burst_job('g1', 'g2') @@ -372,11 +376,7 @@ def test_submit_insar_isce_burst_job(get_mock_hyp3, get_mock_job): @responses.activate def test_resubmit_previous_job(get_mock_hyp3, get_mock_job): job = get_mock_job() - api_response = { - 'jobs': [ - job.to_dict() - ] - } + api_response = {'jobs': [job.to_dict()]} api = get_mock_hyp3() responses.add(responses.POST, urljoin(api.url, '/jobs'), json=api_response) batch = api.submit_prepared_jobs(job.to_dict(for_resubmit=True)) @@ -385,14 +385,7 @@ def test_resubmit_previous_job(get_mock_hyp3, get_mock_job): @responses.activate def test_my_info(get_mock_hyp3): - api_response = { - 'job_names': [ - 'name1', - 'name2' - ], - 'remaining_credits': 25., - 'user_id': 'someUser' - } + api_response = {'job_names': ['name1', 'name2'], 'remaining_credits': 25.0, 'user_id': 'someUser'} api = get_mock_hyp3() responses.add(responses.GET, urljoin(api.url, '/user'), json=api_response) response = api.my_info() @@ -401,18 +394,11 @@ def test_my_info(get_mock_hyp3): @responses.activate def test_check_credits(get_mock_hyp3): - api_response = { - 'job_names': [ - 'name1', - 'name2' - ], - 'remaining_credits': 25., - 'user_id': 'someUser' - } + api_response = {'job_names': ['name1', 'name2'], 'remaining_credits': 25.0, 'user_id': 'someUser'} api = get_mock_hyp3() responses.add(responses.GET, urljoin(api.url, '/user'), json=api_response) - assert math.isclose(api.check_credits(), 25.) 
+ assert math.isclose(api.check_credits(), 25.0) @responses.activate diff --git a/tests/test_jobs.py b/tests/test_jobs.py index e679e79..c1a8784 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -8,42 +8,45 @@ from hyp3_sdk.exceptions import HyP3SDKError from hyp3_sdk.jobs import Batch, Job + SUCCEEDED_JOB = { - "browse_images": ["https://PAIR_PROCESS.png"], - "expiration_time": "2020-10-08T00:00:00+00:00", - "files": [{"filename": "PAIR_PROCESS.nc", "size": 5949932, "url": "https://PAIR_PROCESS.nc"}], - "logs": ["https://d1c05104-b455-4f35-a95a-84155d63f855.log"], - "job_id": "d1c05104-b455-4f35-a95a-84155d63f855", - "job_parameters": {"granules": [ - "S1A_IW_SLC__1SDH_20180511T204719_20180511T204746_021862_025C12_6F77", - "S1B_IW_SLC__1SDH_20180505T204637_20180505T204704_010791_013B91_E42D" - ]}, - "job_type": "PAIR_PROCESS", - "name": "test_success", - "request_time": "2020-09-22T23:55:10+00:00", - "status_code": "SUCCEEDED", - "thumbnail_images": ["https://PAIR_PROCESS_thumb.png"], - "user_id": "asf_hyp3", - "credit_cost": 1, - "priority": 9999, + 'browse_images': ['https://PAIR_PROCESS.png'], + 'expiration_time': '2020-10-08T00:00:00+00:00', + 'files': [{'filename': 'PAIR_PROCESS.nc', 'size': 5949932, 'url': 'https://PAIR_PROCESS.nc'}], + 'logs': ['https://d1c05104-b455-4f35-a95a-84155d63f855.log'], + 'job_id': 'd1c05104-b455-4f35-a95a-84155d63f855', + 'job_parameters': { + 'granules': [ + 'S1A_IW_SLC__1SDH_20180511T204719_20180511T204746_021862_025C12_6F77', + 'S1B_IW_SLC__1SDH_20180505T204637_20180505T204704_010791_013B91_E42D', + ] + }, + 'job_type': 'PAIR_PROCESS', + 'name': 'test_success', + 'request_time': '2020-09-22T23:55:10+00:00', + 'status_code': 'SUCCEEDED', + 'thumbnail_images': ['https://PAIR_PROCESS_thumb.png'], + 'user_id': 'asf_hyp3', + 'credit_cost': 1, + 'priority': 9999, } FAILED_JOB = { - "logs": ["https://281b2087-9e7d-4d17-a9b3-aebeb2ad23c6.log"], - "job_id": "281b2087-9e7d-4d17-a9b3-aebeb2ad23c6", - "job_parameters": { - "granules": [ - "S1A_IW_SLC__1SSH_20161126T080144_20161126T080211_014110_016C51_037E", - "S1B_IW_SLC__1SSH_20161120T080102_20161120T080129_003039_0052AE_AA91" + 'logs': ['https://281b2087-9e7d-4d17-a9b3-aebeb2ad23c6.log'], + 'job_id': '281b2087-9e7d-4d17-a9b3-aebeb2ad23c6', + 'job_parameters': { + 'granules': [ + 'S1A_IW_SLC__1SSH_20161126T080144_20161126T080211_014110_016C51_037E', + 'S1B_IW_SLC__1SSH_20161120T080102_20161120T080129_003039_0052AE_AA91', ] }, - "job_type": "PAIR_PROCESS", - "name": "test_failure", - "request_time": "2020-09-22T23:55:10+00:00", - "status_code": "FAILED", - "user_id": "asf_hyp3", - "credit_cost": 1, - "priority": 9999, + 'job_type': 'PAIR_PROCESS', + 'name': 'test_failure', + 'request_time': '2020-09-22T23:55:10+00:00', + 'status_code': 'FAILED', + 'user_id': 'asf_hyp3', + 'credit_cost': 1, + 'priority': 9999, } @@ -125,8 +128,11 @@ def test_job_expired(): @responses.activate def test_job_download_files(tmp_path, get_mock_job): unexpired_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') - job = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + job = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) responses.add(responses.GET, 'https://foo.com/file', body='foobar') path = job.download_files(tmp_path)[0] @@ -134,8 +140,11 @@ def test_job_download_files(tmp_path, get_mock_job): 
assert path == tmp_path / 'file' assert contents == 'foobar' - job = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/f1', 'size': 0, 'filename': 'f1'}]) + job = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/f1', 'size': 0, 'filename': 'f1'}], + ) responses.add(responses.GET, 'https://foo.com/f1', body='foobar1') path = job.download_files(str(tmp_path))[0] @@ -147,8 +156,11 @@ def test_job_download_files(tmp_path, get_mock_job): @responses.activate def test_job_download_files_create_dirs(tmp_path, get_mock_job): unexpired_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') - job = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + job = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) with pytest.raises(NotADirectoryError): job.download_files(tmp_path / 'not_a_dir', create=False) @@ -163,8 +175,11 @@ def test_job_download_files_create_dirs(tmp_path, get_mock_job): @responses.activate def test_job_download_files_expired(tmp_path, get_mock_job): expired_time = (datetime.now(tz=tz.UTC) - timedelta(days=7)).isoformat(timespec='seconds') - job = get_mock_job(status_code='SUCCEEDED', expiration_time=expired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + job = get_mock_job( + status_code='SUCCEEDED', + expiration_time=expired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) with pytest.raises(HyP3SDKError): job.download_files(tmp_path) @@ -228,8 +243,11 @@ def test_contains(get_mock_job): unexpired_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') j1 = Job.from_dict(SUCCEEDED_JOB) j2 = Job.from_dict(FAILED_JOB) - j3 = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + j3 = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) a = Batch([j1, j2]) @@ -262,8 +280,11 @@ def test_getitem(get_mock_job): unexpired_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') j0 = Job.from_dict(SUCCEEDED_JOB) j1 = Job.from_dict(FAILED_JOB) - j2 = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + j2 = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) batch = Batch([j0, j1, j2]) assert j0 == batch[0] @@ -277,8 +298,11 @@ def test_setitem(get_mock_job): unexpired_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') j0 = Job.from_dict(SUCCEEDED_JOB) j1 = Job.from_dict(FAILED_JOB) - j2 = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + j2 = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) batch = Batch([j0, j1]) assert batch[1] == j1 @@ -290,8 +314,11 @@ def test_reverse(get_mock_job): unexpired_time = (datetime.now(tz=tz.UTC) + 
timedelta(days=7)).isoformat(timespec='seconds') j0 = Job.from_dict(SUCCEEDED_JOB) j1 = Job.from_dict(FAILED_JOB) - j2 = get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}]) + j2 = get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file', 'size': 0, 'filename': 'file'}], + ) batch = Batch([j0, j1, j2]) @@ -320,14 +347,25 @@ def test_batch_complete_succeeded(): @responses.activate def test_batch_download(tmp_path, get_mock_job): expiration_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') - batch = Batch([ - get_mock_job(status_code='SUCCEEDED', expiration_time=expiration_time, - files=[{'url': 'https://foo.com/file1', 'size': 0, 'filename': 'file1'}]), - get_mock_job(status_code='SUCCEEDED', expiration_time=expiration_time, - files=[{'url': 'https://foo.com/file2', 'size': 0, 'filename': 'file2'}]), - get_mock_job(status_code='SUCCEEDED', expiration_time=expiration_time, - files=[{'url': 'https://foo.com/file3', 'size': 0, 'filename': 'file3'}]) - ]) + batch = Batch( + [ + get_mock_job( + status_code='SUCCEEDED', + expiration_time=expiration_time, + files=[{'url': 'https://foo.com/file1', 'size': 0, 'filename': 'file1'}], + ), + get_mock_job( + status_code='SUCCEEDED', + expiration_time=expiration_time, + files=[{'url': 'https://foo.com/file2', 'size': 0, 'filename': 'file2'}], + ), + get_mock_job( + status_code='SUCCEEDED', + expiration_time=expiration_time, + files=[{'url': 'https://foo.com/file3', 'size': 0, 'filename': 'file3'}], + ), + ] + ) responses.add(responses.GET, 'https://foo.com/file1', body='foobar1') responses.add(responses.GET, 'https://foo.com/file2', body='foobar2') responses.add(responses.GET, 'https://foo.com/file3', body='foobar3') @@ -344,9 +382,11 @@ def test_batch_download(tmp_path, get_mock_job): paths = batch.download_files(tmp_path / 'not_a_dir', create=True) contents = [path.read_text() for path in paths] assert len(paths) == 3 - assert set(paths) == {tmp_path / 'not_a_dir' / 'file1', - tmp_path / 'not_a_dir' / 'file2', - tmp_path / 'not_a_dir' / 'file3'} + assert set(paths) == { + tmp_path / 'not_a_dir' / 'file1', + tmp_path / 'not_a_dir' / 'file2', + tmp_path / 'not_a_dir' / 'file3', + } assert set(contents) == {'foobar1', 'foobar2', 'foobar3'} @@ -354,14 +394,25 @@ def test_batch_download(tmp_path, get_mock_job): def test_batch_download_expired(tmp_path, get_mock_job): expired_time = (datetime.now(tz=tz.UTC) - timedelta(days=7)).isoformat(timespec='seconds') unexpired_time = (datetime.now(tz=tz.UTC) + timedelta(days=7)).isoformat(timespec='seconds') - batch = Batch([ - get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file1', 'size': 0, 'filename': 'file1'}]), - get_mock_job(status_code='SUCCEEDED', expiration_time=expired_time, - files=[{'url': 'https://foo.com/file2', 'size': 0, 'filename': 'file2'}]), - get_mock_job(status_code='SUCCEEDED', expiration_time=unexpired_time, - files=[{'url': 'https://foo.com/file3', 'size': 0, 'filename': 'file3'}]) - ]) + batch = Batch( + [ + get_mock_job( + status_code='SUCCEEDED', + expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file1', 'size': 0, 'filename': 'file1'}], + ), + get_mock_job( + status_code='SUCCEEDED', + expiration_time=expired_time, + files=[{'url': 'https://foo.com/file2', 'size': 0, 'filename': 'file2'}], + ), + get_mock_job( + status_code='SUCCEEDED', + 
expiration_time=unexpired_time, + files=[{'url': 'https://foo.com/file3', 'size': 0, 'filename': 'file3'}], + ), + ] + ) responses.add(responses.GET, 'https://foo.com/file1', body='foobar1') responses.add(responses.GET, 'https://foo.com/file2', body='foobar2') responses.add(responses.GET, 'https://foo.com/file3', body='foobar3') @@ -448,60 +499,78 @@ def test_batch_total_credit_cost(): batch = Batch() assert batch.total_credit_cost() == 0 - batch = Batch([ - Job.from_dict({ - 'job_type': 'foo', - 'job_id': 'foo', - 'request_time': '2024-01-01T00:00:00Z', - 'status_code': 'foo', - 'user_id': 'foo', - }), - ]) + batch = Batch( + [ + Job.from_dict( + { + 'job_type': 'foo', + 'job_id': 'foo', + 'request_time': '2024-01-01T00:00:00Z', + 'status_code': 'foo', + 'user_id': 'foo', + } + ), + ] + ) assert batch.total_credit_cost() == 0 - batch = Batch([ - Job.from_dict({ - 'job_type': 'foo', - 'job_id': 'foo', - 'request_time': '2024-01-01T00:00:00Z', - 'status_code': 'foo', - 'user_id': 'foo', - 'credit_cost': 4 - }), - Job.from_dict({ - 'job_type': 'foo', - 'job_id': 'foo', - 'request_time': '2024-01-01T00:00:00Z', - 'status_code': 'foo', - 'user_id': 'foo', - }), - ]) + batch = Batch( + [ + Job.from_dict( + { + 'job_type': 'foo', + 'job_id': 'foo', + 'request_time': '2024-01-01T00:00:00Z', + 'status_code': 'foo', + 'user_id': 'foo', + 'credit_cost': 4, + } + ), + Job.from_dict( + { + 'job_type': 'foo', + 'job_id': 'foo', + 'request_time': '2024-01-01T00:00:00Z', + 'status_code': 'foo', + 'user_id': 'foo', + } + ), + ] + ) assert batch.total_credit_cost() == 4 - batch = Batch([ - Job.from_dict({ - 'job_type': 'foo', - 'job_id': 'foo', - 'request_time': '2024-01-01T00:00:00Z', - 'status_code': 'foo', - 'user_id': 'foo', - 'credit_cost': 1 - }), - Job.from_dict({ - 'job_type': 'foo', - 'job_id': 'foo', - 'request_time': '2024-01-01T00:00:00Z', - 'status_code': 'foo', - 'user_id': 'foo', - 'credit_cost': 2 - }), - Job.from_dict({ - 'job_type': 'foo', - 'job_id': 'foo', - 'request_time': '2024-01-01T00:00:00Z', - 'status_code': 'foo', - 'user_id': 'foo', - 'credit_cost': 5 - }), - ]) + batch = Batch( + [ + Job.from_dict( + { + 'job_type': 'foo', + 'job_id': 'foo', + 'request_time': '2024-01-01T00:00:00Z', + 'status_code': 'foo', + 'user_id': 'foo', + 'credit_cost': 1, + } + ), + Job.from_dict( + { + 'job_type': 'foo', + 'job_id': 'foo', + 'request_time': '2024-01-01T00:00:00Z', + 'status_code': 'foo', + 'user_id': 'foo', + 'credit_cost': 2, + } + ), + Job.from_dict( + { + 'job_type': 'foo', + 'job_id': 'foo', + 'request_time': '2024-01-01T00:00:00Z', + 'status_code': 'foo', + 'user_id': 'foo', + 'credit_cost': 5, + } + ), + ] + ) assert batch.total_credit_cost() == 8 diff --git a/tests/test_util.py b/tests/test_util.py index faf9de8..3d8af42 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -61,15 +61,11 @@ def test_get_authenticated_session_study_area(): def test_get_authenticated_session_http_error(): responses.add(responses.GET, util.AUTH_URL, status=401) - with pytest.raises( - AuthenticationError, - match=r'^Was not able to authenticate with credentials provided.*' - ): + with pytest.raises(AuthenticationError, match=r'^Was not able to authenticate with credentials provided.*'): util.get_authenticated_session('user', 'pass') with pytest.raises( - AuthenticationError, - match=r'^Was not able to authenticate with \.netrc file and no credentials provided.*' + AuthenticationError, match=r'^Was not able to authenticate with \.netrc file and no credentials provided.*' ): 
util.get_authenticated_session(None, None)
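
A note on reproducing the checks above locally: with this diff, linting moves from `flake8` to `ruff`, configured under `[tool.ruff]` in `pyproject.toml`, so `ruff check .` and `ruff format .` apply the same lint and formatting rules as the updated `static-analysis` workflow. Most of the Python churn in the hunks above comes from the pyupgrade (`UP`) rules rewriting `typing.Optional`/`typing.Union` annotations as PEP 604 unions. A minimal sketch of that before/after pattern (the function names are hypothetical, not part of this diff):

```python
from datetime import datetime
from typing import Optional, Union  # only needed for the old-style annotations below


# Before: typing generics, as previously used throughout src/hyp3_sdk
def find_recent_old(start: Optional[datetime] = None, limit: Union[int, None] = None) -> list[dict]:
    """Placeholder illustrating the pre-ruff annotation style."""
    return []


# After: PEP 604 unions, which ruff's UP rules enforce on Python >= 3.10
def find_recent_new(start: datetime | None = None, limit: int | None = None) -> list[dict]:
    """Placeholder illustrating the style this diff converges on."""
    return []
```

Both functions are equivalent at runtime; the modern style simply drops the `typing` imports, which is why the import blocks in `hyp3.py`, `jobs.py`, and `util.py` shrink in the hunks above.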