diff --git a/.gitignore b/.gitignore index a3ba929..3222b10 100644 --- a/.gitignore +++ b/.gitignore @@ -103,7 +103,14 @@ ENV/ .ruff_cache/ # IDE settings -.vscode/ +.vscode/* +!.vscode/recommended_settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/boilerplate-words.txt +!.vscode/project-related-words.txt +!.vscode/cspell.json .idea/ # Test files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5264814..00b0680 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -45,7 +45,7 @@ repos: stages: [pre-push] - id: tests name: run tests - entry: inv pytest.run --params="--cov=." + entry: inv pytest.run --params="--numprocesses auto --create-db --cov=." language: system pass_filenames: false types: [python] diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..fd54d0b --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,57 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Django", + "type": "debugpy", + "request": "launch", + "program": "${workspaceFolder}/test_project/manage.py", + "preLaunchTask": "Launch containers and wait for DB", + "args": [ + "runserver_plus", + "localhost:8000", + ], + "django": true, + "justMyCode": false + }, + { + "name": "Python: Django With SQL Logs", + "type": "debugpy", + "request": "launch", + "program": "${workspaceFolder}/test_project/manage.py", + "preLaunchTask": "Launch containers and wait for DB", + "args": [ + "runserver_plus", + "localhost:8000", + "--print-sql" + ], + "django": true, + "justMyCode": false + }, + { + "name": "Python: Celery", + "type": "debugpy", + "request": "launch", + "module": "celery", + "preLaunchTask": "Launch containers and wait for DB", + "args": [ + "--app", + "test_project.celery_app.app", + "worker", + "--beat", + "--scheduler=django", + "--loglevel=info", + ], + "justMyCode": false + }, + { + "name": "Python: Debug Tests", + "type": "debugpy", + "request": "launch", + 
"program": "${file}", + "purpose": ["debug-test"], + "console": "integratedTerminal", + "justMyCode": false + }, + ] +} diff --git a/.vscode/recommended_settings.json b/.vscode/recommended_settings.json new file mode 100644 index 0000000..1eb96f1 --- /dev/null +++ b/.vscode/recommended_settings.json @@ -0,0 +1,26 @@ +{ + "files.exclude": { + "**/__pycache__": true, + "**/.pytest_cache": true, + "**/.mypy_cache": true, + "**/.ruff_cache": true, + "**/htmlcov": true, + }, + + "editor.rulers": [79], + + "editor.bracketPairColorization.enabled": true, + + "python.analysis.typeCheckingMode": "off", + + "python.analysis.inlayHints.functionReturnTypes": true, + "mypy.enabled": false, + + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + + "[python]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "charliermarsh.ruff" + } +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..0f63964 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,15 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Launch containers and wait for DB", + "type": "shell", + "command": "inv django.wait-for-database", + "problemMatcher": [], + "group": { + "kind": "build", + "isDefault": false + } + } + ] +} diff --git a/HISTORY.rst b/HISTORY.rst index 4ff9759..b1292eb 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,6 +2,12 @@ History ======= +UNRELEASED +------------------ + +* Add base import/export views that only allow users to work with their own jobs (`ImportJobForUserViewSet` and `ExportJobForUserViewSet`). +* Small actions definition refactor in `ImportJobViewSet/ExportJobViewSet` to allow easier overriding. 
+ 1.2.0 (2024-12-26) ------------------ * Fix issue with slow export duration (https://github.com/saritasa-nest/django-import-export-extensions/issues/79): diff --git a/import_export_extensions/api/__init__.py b/import_export_extensions/api/__init__.py index e643144..392cb62 100644 --- a/import_export_extensions/api/__init__.py +++ b/import_export_extensions/api/__init__.py @@ -1,3 +1,15 @@ -from .serializers.export_job import CreateExportJob, ExportJobSerializer -from .serializers.import_job import CreateImportJob, ImportJobSerializer -from .serializers.progress import ProgressInfoSerializer, ProgressSerializer +from .mixins import LimitQuerySetToCurrentUserMixin +from .serializers import ( + CreateExportJob, + CreateImportJob, + ExportJobSerializer, + ImportJobSerializer, + ProgressInfoSerializer, + ProgressSerializer, +) +from .views import ( + ExportJobForUserViewSet, + ExportJobViewSet, + ImportJobForUserViewSet, + ImportJobViewSet, +) diff --git a/import_export_extensions/api/mixins.py b/import_export_extensions/api/mixins.py new file mode 100644 index 0000000..dc4bf9a --- /dev/null +++ b/import_export_extensions/api/mixins.py @@ -0,0 +1,10 @@ +class LimitQuerySetToCurrentUserMixin: + """Make queryset to return only current user jobs.""" + + def get_queryset(self): + """Return user's jobs.""" + return ( + super() + .get_queryset() + .filter(created_by_id=getattr(self.request.user, "pk", None)) + ) diff --git a/import_export_extensions/api/serializers/__init__.py b/import_export_extensions/api/serializers/__init__.py index b661b86..d8b2d45 100644 --- a/import_export_extensions/api/serializers/__init__.py +++ b/import_export_extensions/api/serializers/__init__.py @@ -1,2 +1,11 @@ -from .export_job import ExportJobSerializer, get_create_export_job_serializer -from .import_job import ImportJobSerializer, get_create_import_job_serializer +from .export_job import ( + CreateExportJob, + ExportJobSerializer, + get_create_export_job_serializer, +) +from .import_job 
import ( + CreateImportJob, + ImportJobSerializer, + get_create_import_job_serializer, +) +from .progress import ProgressInfoSerializer, ProgressSerializer diff --git a/import_export_extensions/api/views/__init__.py b/import_export_extensions/api/views/__init__.py index c642548..2409e38 100644 --- a/import_export_extensions/api/views/__init__.py +++ b/import_export_extensions/api/views/__init__.py @@ -1,2 +1,8 @@ -from .export_job import ExportJobViewSet -from .import_job import ImportJobViewSet +from .export_job import ( + ExportJobForUserViewSet, + ExportJobViewSet, +) +from .import_job import ( + ImportJobForUserViewSet, + ImportJobViewSet, +) diff --git a/import_export_extensions/api/views/export_job.py b/import_export_extensions/api/views/export_job.py index c4c43ac..d8b74bf 100644 --- a/import_export_extensions/api/views/export_job.py +++ b/import_export_extensions/api/views/export_job.py @@ -15,6 +15,7 @@ import django_filters from ... import models, resources +from .. import mixins as core_mixins from .. 
import serializers @@ -37,27 +38,11 @@ def __new__(cls, name, bases, attrs, **kwargs): attrs, **kwargs, ) - # Skip if it is a base viewset, since none of needed class attrs are - # specified - if name == "ExportJobViewSet": + # Skip if it has no resource_class specified + if not hasattr(viewset, "resource_class"): return viewset - def start(self: "ExportJobViewSet", request: Request): - """Validate request data and start ExportJob.""" - serializer = self.get_serializer( - data=request.data, - filter_kwargs=request.query_params, - ) - serializer.is_valid(raise_exception=True) - export_job = serializer.save() - return response.Response( - data=self.get_detail_serializer_class()( - instance=export_job, - ).data, - status=status.HTTP_201_CREATED, - ) - - viewset.start = decorators.action( + decorators.action( methods=["POST"], detail=False, queryset=viewset.resource_class.get_model_queryset(), @@ -67,7 +52,11 @@ def start(self: "ExportJobViewSet", request: Request): filter_backends=[ django_filters.rest_framework.DjangoFilterBackend, ], - )(start) + )(viewset.start) + decorators.action( + methods=["POST"], + detail=True, + )(viewset.cancel) # Correct specs of drf-spectacular if it is installed with contextlib.suppress(ImportError): from drf_spectacular.utils import extend_schema, extend_schema_view @@ -110,8 +99,8 @@ class ExportJobViewSet( permission_classes = (permissions.IsAuthenticated,) queryset = models.ExportJob.objects.all() serializer_class = serializers.ExportJobSerializer - resource_class: type[resources.CeleryModelResource] | None = None - filterset_class: django_filters.rest_framework.FilterSet = None + resource_class: type[resources.CeleryModelResource] + filterset_class: django_filters.rest_framework.FilterSet | None = None search_fields = ("id",) ordering = ( "id", @@ -154,7 +143,21 @@ def get_export_create_serializer_class(self): self.resource_class, ) - @decorators.action(methods=["POST"], detail=True) + def start(self, request: Request): + 
"""Validate request data and start ExportJob.""" + serializer = self.get_serializer( + data=request.data, + filter_kwargs=request.query_params, + ) + serializer.is_valid(raise_exception=True) + export_job = serializer.save() + return response.Response( + data=self.get_detail_serializer_class()( + instance=export_job, + ).data, + status=status.HTTP_201_CREATED, + ) + def cancel(self, *args, **kwargs): """Cancel export job that is in progress.""" job: models.ExportJob = self.get_object() @@ -169,3 +172,10 @@ def cancel(self, *args, **kwargs): status=status.HTTP_200_OK, data=serializer.data, ) + + +class ExportJobForUserViewSet( + core_mixins.LimitQuerySetToCurrentUserMixin, + ExportJobViewSet, +): + """Viewset for providing export feature to users.""" diff --git a/import_export_extensions/api/views/import_job.py b/import_export_extensions/api/views/import_job.py index cfe41ef..be7bcfb 100644 --- a/import_export_extensions/api/views/import_job.py +++ b/import_export_extensions/api/views/import_job.py @@ -12,6 +12,7 @@ ) from ... import models, resources +from .. import mixins as core_mixins from .. 
import serializers @@ -32,11 +33,23 @@ def __new__(cls, name, bases, attrs, **kwargs): attrs, **kwargs, ) - # Skip if it is a base viewset, since none of needed class attrs are - # specified - if name == "ImportJobViewSet": + # Skip if it has no resource_class specified + if not hasattr(viewset, "resource_class"): return viewset + decorators.action( + methods=["POST"], + detail=False, + )(viewset.start) + decorators.action( + methods=["POST"], + detail=True, + )(viewset.confirm) + decorators.action( + methods=["POST"], + detail=True, + )(viewset.cancel) + # Correct specs of drf-spectacular if it is installed with contextlib.suppress(ImportError): from drf_spectacular.utils import extend_schema, extend_schema_view @@ -89,7 +102,7 @@ class ImportJobViewSet( permission_classes = (permissions.IsAuthenticated,) queryset = models.ImportJob.objects.all() serializer_class = serializers.ImportJobSerializer - resource_class: type[resources.CeleryModelResource] | None = None + resource_class: type[resources.CeleryModelResource] search_fields = ("id",) ordering = ( "id", @@ -132,7 +145,6 @@ def get_import_create_serializer_class(self): self.resource_class, ) - @decorators.action(methods=["POST"], detail=False) def start(self, request, *args, **kwargs): """Validate request data and start ImportJob.""" serializer = self.get_serializer(data=request.data) @@ -147,7 +159,6 @@ def start(self, request, *args, **kwargs): status=status.HTTP_201_CREATED, ) - @decorators.action(methods=["POST"], detail=True) def confirm(self, *args, **kwargs): """Confirm import job that has `parsed` status.""" job: models.ImportJob = self.get_object() @@ -163,7 +174,6 @@ def confirm(self, *args, **kwargs): data=serializer.data, ) - @decorators.action(methods=["POST"], detail=True) def cancel(self, *args, **kwargs): """Cancel import job that is in progress.""" job: models.ImportJob = self.get_object() @@ -178,3 +188,9 @@ def cancel(self, *args, **kwargs): status=status.HTTP_200_OK, 
data=serializer.data, ) + +class ImportJobForUserViewSet( + core_mixins.LimitQuerySetToCurrentUserMixin, + ImportJobViewSet, +): + """Viewset for providing import feature to users.""" diff --git a/invocations/project.py b/invocations/project.py index 1a6d7c8..41d7656 100644 --- a/invocations/project.py +++ b/invocations/project.py @@ -10,6 +10,7 @@ def init(context: invoke.Context, clean: bool = False): """Prepare env for working with project.""" saritasa_invocations.print_success("Setting up git config") saritasa_invocations.git.setup(context) + saritasa_invocations.system.copy_vscode_settings(context) saritasa_invocations.print_success("Initial assembly of all dependencies") saritasa_invocations.poetry.install(context) if clean: diff --git a/poetry.lock b/poetry.lock index 02804a8..3657280 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -695,6 +695,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + [[package]] name = "executing" version = "2.1.0" @@ -1376,7 +1390,6 @@ files = [ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, @@ -1527,6 +1540,45 @@ pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] +[[package]] +name = "pytest-sugar" +version = "1.0.0" +description = "pytest-sugar is a plugin for pytest that changes the 
default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." +optional = false +python-versions = "*" +files = [ + {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, + {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, +] + +[package.dependencies] +packaging = ">=21.3" +pytest = ">=6.2.0" +termcolor = ">=2.1.0" + +[package.extras] +dev = ["black", "flake8", "pre-commit"] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2055,6 +2107,20 @@ xls = ["xlrd", "xlwt"] xlsx = ["openpyxl (>=2.6.0)"] yaml = ["pyyaml"] +[[package]] +name = "termcolor" +version = "2.5.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.9" +files = [ + {file = "termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8"}, + {file = "termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + [[package]] name = "toml" version = "0.10.2" @@ -2275,4 +2341,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = 
"c6352f426e9171e0b0213ad5a7a41a8456cbb84455692390749ff9fda89dd2a0" +content-hash = "623753d009cc12ddcf104c2ceecf0ab88948a3b4af84c3c374664a6aa2061bc5" diff --git a/pyproject.toml b/pyproject.toml index 26ca96c..2e67d12 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,15 +100,21 @@ pytest-mock = "^3.14.0" # Allows you to use fixtures in @pytest.mark.parametrize. # https://pypi.org/project/pytest-lazy-fixtures/ pytest-lazy-fixtures = "^1.1.1" +# This plugin produces coverage reports. +# https://pytest-cov.readthedocs.io/en/latest/index.html +pytest-cov = "^6.0.0" +# To prettify pytest output +# https://github.com/Teemu/pytest-sugar +pytest-sugar = "^1.0.0" +# To run test in parallel +# Docs: https://pypi.org/project/pytest-xdist/ +pytest-xdist = "^3.6.1" # Package for generating test data # https://factoryboy.readthedocs.io/en/stable/ factory-boy = "^3.3.1" # Werkzeug is a comprehensive WSGI web application library # https://werkzeug.palletsprojects.com/en/3.0.x/ werkzeug = "^3.0.4" -# This plugin produces coverage reports. 
-# https://pytest-cov.readthedocs.io/en/latest/index.html -pytest-cov = ">=5,<7" # Psycopg is a PostgreSQL adapter for the Python # https://www.psycopg.org/docs/install.html psycopg2-binary = "^2.9.9" diff --git a/test_project/fake_app/api/views.py b/test_project/fake_app/api/views.py index f1ab760..17f9c76 100644 --- a/test_project/fake_app/api/views.py +++ b/test_project/fake_app/api/views.py @@ -3,13 +3,12 @@ from ..resources import SimpleArtistResource -class ArtistExportViewSet(views.ExportJobViewSet): +class ArtistExportViewSet(views.ExportJobForUserViewSet): """Simple ViewSet for exporting Artist model.""" resource_class = SimpleArtistResource - -class ArtistImportViewSet(views.ImportJobViewSet): +class ArtistImportViewSet(views.ImportJobForUserViewSet): """Simple ViewSet for importing Artist model.""" resource_class = SimpleArtistResource diff --git a/test_project/tests/conftest.py b/test_project/tests/conftest.py index 60621eb..dd262c7 100644 --- a/test_project/tests/conftest.py +++ b/test_project/tests/conftest.py @@ -28,15 +28,23 @@ def new_artist(): @pytest.fixture -def artist_import_job(existing_artist: Artist) -> ImportJob: +def artist_import_job( + superuser: User, + existing_artist: Artist, +) -> ImportJob: """Return `ImportJob` instance with specified artist.""" - return factories.ArtistImportJobFactory(artists=[existing_artist]) + return factories.ArtistImportJobFactory( + created_by=superuser, + artists=[existing_artist], + ) @pytest.fixture -def artist_export_job() -> ExportJob: +def artist_export_job( + superuser: User, +) -> ExportJob: """Return `ExportJob` instance.""" - return factories.ArtistExportJobFactory() + return factories.ArtistExportJobFactory(created_by=superuser) @pytest.fixture @@ -63,22 +71,38 @@ def uploaded_file(existing_artist: Artist) -> SimpleUploadedFile: @pytest.fixture -def force_import_artist_job(new_artist: Artist) -> Artist: +def force_import_artist_job( + superuser: User, + new_artist: Artist, +) -> Artist: 
"""`ImportJob` with `force_import=True` and file with invalid row.""" return ArtistImportJobFactory( artists=[new_artist], is_valid_file=False, force_import=True, + created_by=superuser, ) @pytest.fixture -def superuser(): - """Return superuser instance.""" +def user(): + """Return user instance.""" return get_user_model().objects.create( username="test_login", email="test@localhost.com", password="test_pass", + is_staff=False, + is_superuser=False, + ) + + +@pytest.fixture +def superuser(): + """Return superuser instance.""" + return get_user_model().objects.create( + username="admin_login", + email="admin@localhost.com", + password="admin_pass", is_staff=True, is_superuser=True, ) diff --git a/test_project/tests/integration_tests/test_api/test_export.py b/test_project/tests/integration_tests/test_api/test_export.py index b972a62..9f5eddd 100644 --- a/test_project/tests/integration_tests/test_api/test_export.py +++ b/test_project/tests/integration_tests/test_api/test_export.py @@ -1,3 +1,4 @@ +from django.contrib.auth.models import User from django.urls import reverse from rest_framework import status, test @@ -68,6 +69,32 @@ def test_export_api_detail( assert response.data["export_finished"] + +@pytest.mark.django_db(transaction=True) +def test_export_user_api_get_detail( + user: User, + admin_api_client: test.APIClient, + artist_export_job: ExportJob, +): + """Ensure export detail api for user returns only the user's jobs.""" + response = admin_api_client.get( + path=reverse( + "export-artist-detail", + kwargs={"pk": artist_export_job.id}, + ), + ) + assert response.status_code == status.HTTP_200_OK, response.data + + artist_export_job.created_by = user + artist_export_job.save() + response = admin_api_client.get( + path=reverse( + "export-artist-detail", + kwargs={"pk": artist_export_job.id}, + ), + ) + assert response.status_code == status.HTTP_404_NOT_FOUND, response.data + + @pytest.mark.django_db(transaction=True) @pytest.mark.parametrize( 
argnames="allowed_cancel_status", diff --git a/test_project/tests/integration_tests/test_api/test_import.py b/test_project/tests/integration_tests/test_api/test_import.py index 6352ddf..2bd8bc3 100644 --- a/test_project/tests/integration_tests/test_api/test_import.py +++ b/test_project/tests/integration_tests/test_api/test_import.py @@ -60,6 +60,32 @@ def test_import_api_detail( assert response.data["import_finished"] +@pytest.mark.django_db(transaction=True) +def test_import_user_api_get_detail( + user: User, + admin_api_client: APIClient, + artist_import_job: ImportJob, +): + """Ensure import detail api for user returns only users jobs.""" + response = admin_api_client.get( + path=reverse( + "import-artist-detail", + kwargs={"pk": artist_import_job.id}, + ), + ) + assert response.status_code == status.HTTP_200_OK, response.data + + artist_import_job.created_by = user + artist_import_job.save() + response = admin_api_client.get( + path=reverse( + "import-artist-detail", + kwargs={"pk": artist_import_job.id}, + ), + ) + assert response.status_code == status.HTTP_404_NOT_FOUND, response.data + + @pytest.mark.django_db(transaction=True) def test_force_import_api_detail( admin_api_client: APIClient, @@ -103,6 +129,7 @@ def test_force_import_api_detail( def test_import_api_detail_with_row_errors( admin_api_client: APIClient, existing_artist: Artist, + superuser: User, ): """Ensure import detail api shows row errors.""" expected_error_message = "Instrument matching query does not exist." 
@@ -115,6 +142,7 @@ def test_import_api_detail_with_row_errors( import_artist_job = ArtistImportJobFactory( artists=[existing_artist], + created_by=superuser, ) # Remove instrument to trigger row error existing_artist.instrument.delete() @@ -137,6 +165,7 @@ def test_import_api_detail_with_row_errors( @pytest.mark.django_db(transaction=True) def test_import_api_detail_with_base_errors( + superuser: User, admin_api_client: APIClient, existing_artist: Artist, ): @@ -158,6 +187,7 @@ def test_import_api_detail_with_base_errors( artists=[existing_artist], data_file=uploaded_file, force_import=True, + created_by=superuser, ) import_artist_job.parse_data()