diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 033bd35..9e462d1 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 2.14.0 +current_version = 3.0.0 commit = False tag = False diff --git a/.circleci/config.yml b/.circleci/config.yml index f6f42c7..52b2c2e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -14,7 +14,7 @@ defaults: &defaults working_directory: ~/repo docker: - - image: ${AWS_ECR_DOMAIN}/globality-build:2019.41.70 + - image: ${AWS_ECR_DOMAIN}/globality-build:2020.36.100 aws_auth: aws_access_key_id: ${AWS_ACCESS_KEY_ID} aws_secret_access_key: ${AWS_SECRET_ACCESS_KEY} @@ -28,7 +28,7 @@ defaults: &defaults deploy_defaults: &deploy_defaults working_directory: ~/repo docker: - - image: ${AWS_ECR_DOMAIN}/globality-build:2019.41.70 + - image: ${AWS_ECR_DOMAIN}/globality-build:2020.36.100 aws_auth: aws_access_key_id: ${AWS_ACCESS_KEY_ID} aws_secret_access_key: ${AWS_SECRET_ACCESS_KEY} @@ -39,6 +39,7 @@ deploy_defaults: &deploy_defaults PYPI_USERNAME: "InjectedDuringRuntime" PYPI_PASSWORD: "InjectedDuringRuntime" + whitelist: &whitelist paths: . @@ -56,22 +57,34 @@ jobs: root: ~/repo <<: *whitelist - build_base_docker: + build_docker: <<: *defaults steps: - attach_workspace: at: ~/repo - - setup_remote_docker + - setup_remote_docker: + docker_layer_caching: true - run: - name: Build Base Docker - # install dependencies for loading ecs task definitions + name: Login AWS ECR command: | eval $(aws ecr get-login --no-include-email) - globality-build build-gen local - globality-build docker-build-push --repo python-library + - run: + name: Build Docker - Application Service Code + command: | + # pwd is here to prevent error when pre_docker_build returns nothing + pwd + + + docker build --tag $AWS_ECR_DOMAIN/python-library:$CIRCLE_SHA1 \ + --build-arg BUILD_NUM=$CIRCLE_BUILD_NUM \ + --build-arg SHA1=$CIRCLE_SHA1 \ + --build-arg EXTRA_INDEX_URL=$EXTRA_INDEX_URL \ + --build-arg JFROG_AUTH=$JFROG_AUTH . 
+ + docker push $AWS_ECR_DOMAIN/python-library:$CIRCLE_SHA1 test: <<: *defaults @@ -82,13 +95,26 @@ jobs: - setup_remote_docker - run: - name: Test code + name: Login AWS ECR command: | - docker create -v /src/microcosm/tests/ --name service_tests alpine:3.4 /bin/true - docker cp $(pwd)/microcosm/tests service_tests:/src/microcosm/ eval $(aws ecr get-login --no-include-email) - docker pull ${AWS_ECR_DOMAIN}/python-library:${CIRCLE_SHA1} + + + - run: + name: Copy service tests to volume + command: | + + docker create -v /src/microcosm/tests/ --name service_tests alpine:3.11 /bin/true + docker cp $(pwd)/microcosm/tests service_tests:/src/microcosm/ + + - run: + name: Run Test + command: | docker run -it --volumes-from service_tests ${AWS_ECR_DOMAIN}/python-library:${CIRCLE_SHA1} test + + + + lint: <<: *defaults @@ -97,16 +123,24 @@ jobs: at: ~/repo - setup_remote_docker + - run: + name: Login AWS ECR + command: | + eval $(aws ecr get-login --no-include-email) - run: - name: Run Lint + name: Copy service tests to volume command: | - docker create -v /src/microcosm/tests/ --name service_tests alpine:3.4 /bin/true + + docker create -v /src/microcosm/tests/ --name service_tests alpine:3.11 /bin/true docker cp $(pwd)/microcosm/tests service_tests:/src/microcosm/ - eval $(aws ecr get-login --no-include-email) - docker pull ${AWS_ECR_DOMAIN}/python-library:${CIRCLE_SHA1} + + - run: + name: Run Lint + command: | docker run -it --volumes-from service_tests ${AWS_ECR_DOMAIN}/python-library:${CIRCLE_SHA1} lint + typehinting: <<: *defaults @@ -115,15 +149,33 @@ jobs: at: ~/repo - setup_remote_docker + - run: + name: Login AWS ECR + command: | + eval $(aws ecr get-login --no-include-email) - run: - name: Run Typehinting + name: Copy service tests to volume command: | - docker create -v /src/microcosm/tests/ --name service_tests alpine:3.4 /bin/true + + docker create -v /src/microcosm/tests/ --name service_tests alpine:3.11 /bin/true docker cp $(pwd)/microcosm/tests service_tests:/src/microcosm/ - eval $(aws ecr get-login --no-include-email) - docker pull ${AWS_ECR_DOMAIN}/python-library:${CIRCLE_SHA1} + + - run: + name: Run Typehinting + command: | docker run -it --volumes-from service_tests ${AWS_ECR_DOMAIN}/python-library:${CIRCLE_SHA1} typehinting + + deploy_jfrog_rc: + <<: *defaults + steps: + - attach_workspace: + at: ~/repo + - run: + name: Deploy + command: | + echo "Not publishing package!" 
+ deploy_pypi: <<: *defaults steps: @@ -146,6 +198,7 @@ jobs: python setup.py sdist twine upload --repository pypi dist/microcosm-${version}.tar.gz + workflows: version: 2 @@ -156,7 +209,7 @@ workflows: # run for all branches and tags tags: only: /.*/ - - build_base_docker: + - build_docker: requires: - checkout filters: @@ -165,21 +218,26 @@ workflows: only: /.*/ - lint: requires: - - build_base_docker + - build_docker filters: # run for all branches and tags tags: only: /.*/ - test: requires: - - build_base_docker + - build_docker filters: # run for all branches and tags tags: only: /.*/ + - deploy_jfrog_rc: + requires: + - test + - lint + - typehinting - typehinting: requires: - - build_base_docker + - build_docker filters: # run for all branches and tags tags: diff --git a/.globality/build.json b/.globality/build.json index 6b1595c..9177e8c 100644 --- a/.globality/build.json +++ b/.globality/build.json @@ -6,5 +6,5 @@ } }, "type": "python-library", - "version": "2019.41.70" + "version": "2020.36.100" } diff --git a/build.python-library/docker-base/Dockerfile.template b/Dockerfile similarity index 51% rename from build.python-library/docker-base/Dockerfile.template rename to Dockerfile index f4e5c3f..da42a35 100644 --- a/build.python-library/docker-base/Dockerfile.template +++ b/Dockerfile @@ -11,7 +11,33 @@ # # -FROM +# ----------- deps ----------- +# Install from Debian Stretch with modern Python support +FROM python:slim-stretch as deps + +# +# Most services will use the same set of packages here, though a few will install +# custom dependencies for native requirements. +# + +ARG EXTRA_INDEX_URL +ENV EXTRA_INDEX_URL ${EXTRA_INDEX_URL} + +ENV CORE_PACKAGES locales +ENV BUILD_PACKAGES build-essential libffi-dev +ENV OTHER_PACKAGES libssl-dev + + +RUN apt-get update && \ + apt-get install -y --no-install-recommends ${CORE_PACKAGES} ${BUILD_PACKAGES} && \ + apt-get install -y --no-install-recommends ${OTHER_PACKAGES} && \ + apt-get autoremove -y && \ + rm -rf /var/lib/apt/lists/* + + +# ----------- base ----------- + +FROM deps as base # Install dependencies # @@ -53,8 +79,34 @@ ENV LC_ALL en_US.UTF-8 COPY README.md MANIFEST.in setup.cfg setup.py /src/ -RUN pip install --upgrade --extra-index-url ${EXTRA_INDEX_URL} /src/ && \ +RUN pip install --no-cache-dir --upgrade --extra-index-url ${EXTRA_INDEX_URL} /src/ && \ apt-get remove --purge -y ${BUILD_PACKAGES} && \ apt-get autoremove -y && \ rm -rf /var/lib/apt/lists/* + +# ----------- final ----------- +FROM base + +# Setup invocation +# +# We expose the application on the standard HTTP port and use an entrypoint +# to customize the `dev` and `test` targets. + +ENV NAME microcosm +COPY entrypoint.sh /src/ +ENTRYPOINT ["./entrypoint.sh"] + +# Install source +# +# We should not need to reinstall dependencies here, but we do need to import +# the distribution properly. We also save build arguments to the image using +# microcosm-compatible environment variables. + + +ARG BUILD_NUM +ARG SHA1 +ENV MICROCOSM__BUILD_INFO_CONVENTION__BUILD_NUM ${BUILD_NUM} +ENV MICROCOSM__BUILD_INFO_CONVENTION__SHA1 ${SHA1} +COPY $NAME /src/$NAME/ +RUN pip install --no-cache-dir --extra-index-url $EXTRA_INDEX_URL -e . diff --git a/Dockerfile.template b/Dockerfile.template deleted file mode 100644 index efdb463..0000000 --- a/Dockerfile.template +++ /dev/null @@ -1,39 +0,0 @@ -# -# Globality autogenerated Docker configuration -# -# This file is auto generated with globality-build. 
-# You should not make any changes to this file manually -# -# Any changes made to this file will be overwritten in the -# next version of the build. -# -# See: http://github.com/globality-corp/globality-build -# -# - - -FROM /python-library:base- - -# Setup invocation -# -# We expose the application on the standard HTTP port and use an entrypoint -# to customize the `dev` and `test` targets. - -ENV NAME microcosm -COPY entrypoint.sh /src/ -ENTRYPOINT ["./entrypoint.sh"] - -# Install source -# -# We should not need to reinstall dependencies here, but we do need to import -# the distribution properly. We also save build arguments to the image using -# microcosm-compatible environment variables. - - -ARG BUILD_NUM -ARG SHA1 -ENV MICROCOSM__BUILD_INFO_CONVENTION__BUILD_NUM ${BUILD_NUM} -ENV MICROCOSM__BUILD_INFO_CONVENTION__SHA1 ${SHA1} -COPY $NAME /src/$NAME/ -RUN pip install --extra-index-url $EXTRA_INDEX_URL -e . - diff --git a/build.python-library/docker-base/Dockerfile.deps b/build.python-library/docker-base/Dockerfile.deps deleted file mode 100644 index 3e7a78f..0000000 --- a/build.python-library/docker-base/Dockerfile.deps +++ /dev/null @@ -1,36 +0,0 @@ -# -# Globality autogenerated Docker configuration -# -# This file is auto generated with globality-build. -# You should not make any changes to this file manually -# -# Any changes made to this file will be overwritten in the -# next version of the build. -# -# See: http://github.com/globality-corp/globality-build -# -# - - -# Install from Debian Stretch with modern Python support -FROM python:slim-stretch - -# -# Most services will use the same set of packages here, though a few will install -# custom dependencies for native requirements. -# - -ARG EXTRA_INDEX_URL -ENV EXTRA_INDEX_URL ${EXTRA_INDEX_URL} - -ENV CORE_PACKAGES locales -ENV BUILD_PACKAGES build-essential libffi-dev -ENV OTHER_PACKAGES libssl-dev - - -RUN apt-get update && \ - apt-get install -y --no-install-recommends ${CORE_PACKAGES} ${BUILD_PACKAGES} && \ - apt-get install -y --no-install-recommends ${OTHER_PACKAGES} && \ - apt-get autoremove -y && \ - rm -rf /var/lib/apt/lists/* - diff --git a/entrypoint.sh b/entrypoint.sh index e79d498..1b25351 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -5,7 +5,6 @@ # Entrypoint conventions are as follows: # # - If the container is run without a custom CMD, the service should run as it would in production. -# # - If the container is run with the "dev" CMD, the service should run in development mode. 
# # Normally, this means that if the user's source has been mounted as a volume, the server will @@ -20,20 +19,20 @@ if [ "$1" = "test" ]; then - # Install standard test dependencies; YMMV - pip --quiet install \ - .[test] nose PyHamcrest coverage - exec nosetests ${NAME} + # Install standard test dependencies; YMMV + pip --quiet install \ + .[test] nose "PyHamcrest<1.10.0" coverage + exec nosetests elif [ "$1" = "lint" ]; then - # Install standard linting dependencies; YMMV - pip --quiet install \ - .[lint] flake8 flake8-print flake8-logging-format flake8-isort - exec flake8 ${NAME} + # Install standard linting dependencies; YMMV + pip --quiet install \ + .[lint] flake8 flake8-print flake8-logging-format "isort<5" flake8-isort + exec flake8 ${NAME} elif [ "$1" = "typehinting" ]; then - # Install standard type-linting dependencies - pip --quiet install mypy - mypy ${NAME} --ignore-missing-imports + # Install standard type-linting dependencies + pip --quiet install mypy + mypy ${NAME} --ignore-missing-imports else - echo "Cannot execute $@" - exit 3 + echo "Cannot execute $@" + exit 3 fi diff --git a/microcosm/opaque.py b/microcosm/opaque.py index 01189ba..f35519b 100644 --- a/microcosm/opaque.py +++ b/microcosm/opaque.py @@ -10,7 +10,7 @@ Similarly, different layers of an application may use this data in different ways: - - A logging decoratory might automatically log all opaque values (see `microcosm-logging`) + - A logging decorator might automatically log all opaque values (see `microcosm-logging`) - A pubsub framework might insert opaque values into new messages (see `microcosm-pubsub`) - An outbound web request might insert opaque values as HTTP headers @@ -23,12 +23,6 @@ from copy import deepcopy from types import MethodType -from opentracing.ext import tags -from opentracing.propagation import Format -from opentracing_instrumentation.request_context import span_in_context - -from microcosm.tracing import OPAQUE_KEY_WHITE_LIST, SPAN_NAME - def _make_initializer(opaque): @@ -46,49 +40,6 @@ def __enter__(self): self.saved = deepcopy(opaque._store) opaque.update(self.func()) - # Use of a tracing solution on top of opaque is optional! - if opaque.tracer: - span = self.start_span() - self.set_tags(span) - self.pass_span_context_to_children(span) - - def start_span(self): - """ - Extract any existing span context from the graph, and use it to - initialize a span for this opaque context. - """ - span_context = opaque.tracer.extract(Format.TEXT_MAP, opaque.as_dict()) - span_tags = {tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER} - - return self.enter_context( - opaque.tracer.start_span( - opaque.get(SPAN_NAME, opaque.service_name), - child_of=span_context, - tags=span_tags, - ), - ) - - def set_tags(self, span): - """ - Copy opaque tags into tracer tags. - """ - for key, value in opaque.as_dict().items(): - if key.lower() in OPAQUE_KEY_WHITE_LIST: - span.set_tag(key, value) - - def pass_span_context_to_children(self, span): - """ - Save span information in jaeger global storage as well as in - graph.opaque so it can be passed to children in this process as - well as across other processes e.g. over HTTP calls or pubsub - boundaries. 
- - """ - self.enter_context(span_in_context(span)) - span_dict = dict() - opaque.tracer.inject(span, Format.HTTP_HEADERS, span_dict) - opaque.update(span_dict) - def __exit__(self, *exc): opaque._store = self.saved self.saved = None @@ -120,7 +71,6 @@ def foo(): """ def __init__(self, *args, **kwargs): - self.tracer = kwargs.pop("tracer", None) self.service_name = kwargs.pop("name", None) self._store = dict(*args, **kwargs) self.initialize = _make_initializer(self) @@ -145,4 +95,4 @@ def as_dict(self): def configure_opaque(graph): - return Opaque(graph.config.opaque, tracer=graph.tracer, name=graph.metadata.name) + return Opaque(graph.config.opaque, name=graph.metadata.name) diff --git a/microcosm/tests/test_opaque.py b/microcosm/tests/test_opaque.py index a9e6b10..48dabb9 100644 --- a/microcosm/tests/test_opaque.py +++ b/microcosm/tests/test_opaque.py @@ -6,11 +6,8 @@ assert_that, equal_to, has_entries, - has_key, is_, - not_, ) -from jaeger_client.constants import TRACE_ID_HEADER from microcosm.api import binding, create_object_graph, load_from_dict from microcosm.opaque import Opaque @@ -141,7 +138,6 @@ def test_collaboration(): ) graph.lock() - assert_that(graph.opaque.as_dict(), not_(has_key(TRACE_ID_HEADER))) # we should be able to initialize the opaque data and observe it from the collaborators decorated_func = graph.opaque.initialize( example_func, OTHER, OTHER @@ -150,5 +146,4 @@ def test_collaboration(): assert_that(graph.opaque.as_dict(), is_(equal_to({THIS: VALUE}))) # NB: opaque.initialize will also inject some jaeger-related metadata which the tests can ignore. assert_that(decorated_func(), has_entries(example_func(OTHER, OTHER))) - assert_that(decorated_func(), has_key(TRACE_ID_HEADER)) assert_that(graph.opaque.as_dict(), is_(equal_to({THIS: VALUE}))) diff --git a/microcosm/tracing.py b/microcosm/tracing.py deleted file mode 100644 index 812db5c..0000000 --- a/microcosm/tracing.py +++ /dev/null @@ -1,55 +0,0 @@ -from jaeger_client.config import ( - DEFAULT_REPORTING_HOST, - DEFAULT_REPORTING_PORT, - DEFAULT_SAMPLING_PORT, - Config, -) -from jaeger_client.constants import TRACE_ID_HEADER - -from microcosm.api import binding, defaults, typed -from microcosm.config.types import boolean - - -SPAN_NAME = "span_name" -OPAQUE_KEY_WHITE_LIST = [ - "x-request-user", - "x-request-id", - TRACE_ID_HEADER, -] - - -@binding("tracer") -@defaults( - enabled=typed(boolean, default_value=False), - sample_type="ratelimiting", - sample_param=typed(int, 10), - sampling_port=typed(int, DEFAULT_SAMPLING_PORT), - reporting_port=typed(int, DEFAULT_REPORTING_PORT), - reporting_host=DEFAULT_REPORTING_HOST, - logging_enabled=typed(boolean, False), -) -def configure_tracing(graph): - """ - See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about - available sampling strategies. 
- - """ - if graph.config.tracer.enabled: - config = Config( - config={ - "sampler": { - "type": graph.config.tracer.sample_type, - "param": graph.config.tracer.sample_param, - }, - "local_agent": { - "sampling_port": graph.config.tracer.sampling_port, - "reporting_port": graph.config.tracer.reporting_port, - "reporting_host": graph.config.tracer.reporting_host, - }, - "logging": graph.config.tracer.logging_enabled, - }, - service_name=graph.metadata.name, - ) - return config.initialize_tracer() - - return None diff --git a/setup.cfg b/setup.cfg index 1e33831..168a7f4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -10,10 +10,20 @@ force_grid_wrap = 4 include_trailing_comma = True known_first_party = microcosm known_standard_library = dataclasses,pkg_resources -known_third_party = six,hamcrest,parameterized,microcosm,unidecode,nose +known_third_party = alembic,allennlp,boto3,click,hamcrest,joblib,matplotlib,microcosm,microcosm_sagemaker,networkx,node2vec,nose,numpy,pandas,parameterized,seaborn,six,sklearn,taxonomies,tf,torch,tqdm,unidecode line_length = 99 lines_after_imports = 2 multi_line_output = 3 [mypy] ignore_missing_imports = True + +[nosetests] +with-coverage = True +cover-package = microcosm +cover-html = True +cover-html-dir = coverage +cover-erase = True + +[coverage:report] +show_missing = True diff --git a/setup.py b/setup.py index f0741fc..11a714e 100755 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ project = "microcosm" -version = "2.14.0" +version = "3.0.0" setup( name=project, @@ -22,10 +22,7 @@ install_requires=[ "contextdecorator>=0.10.0", "inflection>=0.3.1", - "jaeger-client>=4.1.0", "lazy>=1.3", - "opentracing-instrumentation>=3.2.0", - "tornado<6", ], setup_requires=[ "nose>=1.3.6", @@ -36,7 +33,6 @@ "microcosm.factories": [ "hello_world = microcosm.example:create_hello_world", "opaque = microcosm.opaque:configure_opaque", - "tracer = microcosm.tracing:configure_tracing", ], }, tests_require=[
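With the jaeger/opentracing integration removed in 3.0.0, `Opaque` no longer accepts a `tracer` argument, `configure_opaque` builds it from config and the graph name only, and `opaque.initialize` no longer injects trace headers. The following is a minimal sketch of the remaining behaviour, based on `microcosm/opaque.py` and `test_opaque.py` above; the `context_func` / `do_work` names and the header values are illustrative, not taken from the repo.

    from microcosm.opaque import Opaque


    def context_func(key, value):
        # Whatever this returns is merged into the opaque store for the
        # duration of the decorated call (or `with` block), then restored.
        return {key: value}


    # 3.0.0: no `tracer` keyword any more, just initial values and a name.
    opaque = Opaque({"x-request-id": "abc123"}, name="example-service")


    @opaque.initialize(context_func, "x-request-user", "someone")
    def do_work():
        # Inside the initialized scope both the initial values and the
        # initializer's output are visible; no trace headers are added.
        return dict(opaque.as_dict())


    do_work()          # {'x-request-id': 'abc123', 'x-request-user': 'someone'}
    opaque.as_dict()   # {'x-request-id': 'abc123'}

The same initializer can be used as a context manager (`with opaque.initialize(...):`), which is what the `test_collaboration` test above relies on once the `TRACE_ID_HEADER` assertions are dropped.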