diff --git a/.github/workflows/branch_ci.yml b/.github/workflows/branch_ci.yml new file mode 100644 index 0000000..2f9e3ff --- /dev/null +++ b/.github/workflows/branch_ci.yml @@ -0,0 +1,16 @@ +name: Branch CI (Snapshot) + +on: + push: + branches-ignore: + - main + +jobs: + call-build-and-deploy: + permissions: + contents: read + packages: write + uses: ./.github/workflows/build_and_deploy.yml + with: + is_snapshot: true + version_regex: "-SNAPSHOT$" # Ensure the version ends with -SNAPSHOT diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml new file mode 100644 index 0000000..1c6d9d9 --- /dev/null +++ b/.github/workflows/build_and_deploy.yml @@ -0,0 +1,91 @@ +name: Build and Deploy + +on: + workflow_call: + inputs: + is_snapshot: + required: true + type: boolean + version_regex: + required: true + type: string + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ ubuntu-latest, macos-latest ] + java-version: [ '8', '11', '17', '21' ] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up JDK ${{ matrix.java-version }} on ${{ matrix.os }} + uses: actions/setup-java@v4 + with: + java-version: ${{ matrix.java-version }} + distribution: 'corretto' + cache: maven + + - name: Grant execute permission for Maven Wrapper + run: chmod +x ./mvnw + + - name: Get Project Version + id: get_version + run: | + VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo "VERSION=$VERSION" >> $GITHUB_ENV + + - name: Validate Version with Regex + run: | + if [[ "${{ env.VERSION }}" =~ ${{ inputs.version_regex }} ]]; then + echo "Version ${{ env.VERSION }} matches the regex ${{ inputs.version_regex }}. Proceeding..." + else + echo "Version ${{ env.VERSION }} does not match the regex ${{ inputs.version_regex }}. Failing..." + exit 1 + fi + + - name: Build with Maven Wrapper + run: ./mvnw clean verify + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-results - JDK ${{ matrix.java-version }} on ${{ matrix.os }} + path: target/surefire-reports/*.xml + + deploy: + needs: build + permissions: + contents: read + packages: write + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up JDK 8 + uses: actions/setup-java@v4 + with: + java-version: '8' + distribution: 'corretto' + cache: maven + + - name: Grant execute permission for Maven Wrapper + run: chmod +x ./mvnw + + - name: Deploy SNAPSHOT or Release + run: | + if [[ "$IS_SNAPSHOT" == "true" ]]; then + echo "Deploying SNAPSHOT version..." + ./mvnw --batch-mode -DskipTests deploy + else + echo "Deploying RELEASE version..."
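+ # Same deploy goal as the snapshot branch: Maven routes the artifact to the release or snapshot repository declared in pom.xml's distributionManagement, based on whether the project version ends in -SNAPSHOT.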
+ ./mvnw --batch-mode -DskipTests deploy + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + IS_SNAPSHOT: ${{ inputs.is_snapshot }} diff --git a/.github/workflows/main_ci.yml b/.github/workflows/main_ci.yml new file mode 100644 index 0000000..16b046f --- /dev/null +++ b/.github/workflows/main_ci.yml @@ -0,0 +1,16 @@ +name: Main CI (Release) + +on: + push: + branches: + - main + +jobs: + call-build-and-deploy: + permissions: + contents: read + packages: write + uses: ./.github/workflows/build_and_deploy.yml + with: + is_snapshot: false + version_regex: "^[^\\s-]+$" # Ensure the version does not contain -SNAPSHOT diff --git a/.github/workflows/test_results.yml b/.github/workflows/test_results.yml new file mode 100644 index 0000000..5f2f9ed --- /dev/null +++ b/.github/workflows/test_results.yml @@ -0,0 +1,36 @@ +name: Test Results + +on: + workflow_run: + workflows: + - "Branch CI (Snapshot)" + types: + - completed + +permissions: {} + +jobs: + test-results: + name: Test Results + runs-on: ubuntu-latest + if: github.event.workflow_run.conclusion != 'skipped' + + permissions: + checks: write + pull-requests: write + actions: read + + steps: + - name: Download and Extract Artifacts + uses: dawidd6/action-download-artifact@v6 + with: + run_id: ${{ github.event.workflow_run.id }} + path: artifacts + + - name: Publish Test Results + uses: EnricoMi/publish-unit-test-result-action@v2 + with: + commit: ${{ github.event.workflow_run.head_sha }} + event_file: artifacts/Event File/event.json + event_name: ${{ github.event.workflow_run.event }} + files: "artifacts/**/*.xml" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..65f20b1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,35 @@ +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### IntelliJ IDEA ### +.idea/ +*.iws +*.iml +*.ipr + +### Eclipse ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ + +### Mac OS ### +.DS_Store diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000..d58dfb7 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,19 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+wrapperVersion=3.3.2 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..38a18a4 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,53 @@ +# Contributing to AWS SDK Java OpenTelemetry Metrics + +We welcome contributions to this project! Below are guidelines to help you get started. + +## How to Contribute + +1. **Fork the repository**: + Create your own fork of the repository by clicking the "Fork" button in GitHub. + +2. **Create a feature branch**: + Clone your fork locally, then create a feature branch for your work: + + ```bash + git checkout -b feature/my-new-feature + ``` + +3. **Make your changes**: + Implement your changes, ensuring that they follow the project’s coding standards and best practices. + +4. **Commit your changes**: + Commit your changes to your feature branch: + + ```bash + git commit -am 'Add some feature' + ``` + +5. **Push your changes to your fork**: + Push your changes to your fork on GitHub: + + ```bash + git push origin feature/my-new-feature + ``` + +6. **Create a Pull Request**: + Once your changes are ready, open a pull request (PR) from your branch on GitHub. + - Ensure that your PR description explains what changes you’ve made and why. + - Mention any related issues, if applicable. + +## Running Tests + +Before submitting a PR, make sure all tests pass: + +```bash +./mvnw test +``` + +If you’ve added new features, consider adding appropriate unit tests as well. + +## Code of Conduct + +Please note that this project is governed by a [Code of Conduct]. By participating, you are expected to uphold this code. + +[Code of Conduct]: CODE_OF_CONDUCT.md diff --git a/README.md b/README.md index 0a03b0a..d28fac9 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,57 @@ -# aws-sdk-java-opentelemetry-metrics -OpenTelemetry Metric Publisher for AWS SDK for Java – Export AWS SDK metrics to OpenTelemetry for enhanced observability +# AWS SDK Java OpenTelemetry Metrics + +A lightweight metrics publisher that integrates AWS SDK metrics with OpenTelemetry, allowing you to monitor and collect +AWS client performance metrics in your distributed applications. + +## Usage + +This library integrates AWS SDK Java metrics with OpenTelemetry’s metrics API, allowing you to collect and publish AWS client performance data such as API call durations, retry counts, and more. 
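+### Setting Up OpenTelemetry
+
+The publisher records measurements through whatever `OpenTelemetry` instance you hand it, so metrics are only exported if that instance has a configured meter provider. The sketch below is one possible setup, assuming the `opentelemetry-sdk` and `opentelemetry-exporter-otlp` artifacts are on your classpath (this library itself only depends on `opentelemetry-api`):
+
+```java
+import io.opentelemetry.api.OpenTelemetry;
+import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter;
+import io.opentelemetry.sdk.OpenTelemetrySdk;
+import io.opentelemetry.sdk.metrics.SdkMeterProvider;
+import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader;
+
+import java.time.Duration;
+
+// Export metrics over OTLP/gRPC every 60 seconds (the exporter defaults to http://localhost:4317)
+SdkMeterProvider meterProvider = SdkMeterProvider.builder()
+        .registerMetricReader(PeriodicMetricReader.builder(OtlpGrpcMetricExporter.getDefault())
+                .setInterval(Duration.ofSeconds(60))
+                .build())
+        .build();
+
+OpenTelemetry openTelemetry = OpenTelemetrySdk.builder()
+        .setMeterProvider(meterProvider)
+        .build();
+
+// Pass this OpenTelemetry instance to OtelMetricPublisher, as shown in the basic example below
+```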
+ +### Basic Example + +Here’s a simple example of how to use the `OtelMetricPublisher`: + +```java +import com.appsflyer.otelawsmetrics.OtelMetricPublisher; +import io.opentelemetry.api.OpenTelemetry; +import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; +import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.metrics.MetricPublisher; + +import java.util.Map; + +public class MyAwsService { + private final DynamoDbAsyncClient dynamoDbAsyncClient; + + public MyAwsService(OpenTelemetry openTelemetry) { + // Create the metric publisher + MetricPublisher metricPublisher = new OtelMetricPublisher(openTelemetry, "aws.sdk"); + + // Create the DynamoDbAsyncClient with the metric publisher + this.dynamoDbAsyncClient = DynamoDbAsyncClient.builder() + .overrideConfiguration(ClientOverrideConfiguration.builder() + .addMetricPublisher(metricPublisher) + .build()) + .build(); + } + + public void putItemAsync(String tableName, Map<String, AttributeValue> item) { + // Perform DynamoDB operations and automatically collect metrics + dynamoDbAsyncClient.putItem(putItemRequest -> putItemRequest.tableName(tableName).item(item)); + } +} +``` + +### Configuration + +You can configure the `OtelMetricPublisher` with an explicit metric prefix and a custom executor if needed: + +```java +Executor customExecutor = Executors.newSingleThreadExecutor(); +OtelMetricPublisher metricPublisher = new OtelMetricPublisher(GlobalOpenTelemetry.get(), "aws.sdk", customExecutor); +``` + +This allows you to use a custom executor for asynchronous metrics publishing. + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. + diff --git a/mvnw b/mvnw new file mode 100755 index 0000000..19529dd --- /dev/null +++ b/mvnw @@ -0,0 +1,259 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Apache Maven Wrapper startup batch script, version 3.3.2 +# +# Optional ENV vars +# ----------------- +# JAVA_HOME - location of a JDK home dir, required when download maven via java source +# MVNW_REPOURL - repo url base for downloading maven distribution +# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output +# ---------------------------------------------------------------------------- + +set -euf +[ "${MVNW_VERBOSE-}" != debug ] || set -x + +# OS specific support.
+native_path() { printf %s\\n "$1"; } +case "$(uname)" in +CYGWIN* | MINGW*) + [ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")" + native_path() { cygpath --path --windows "$1"; } + ;; +esac + +# set JAVACMD and JAVACCMD +set_java_home() { + # For Cygwin and MinGW, ensure paths are in Unix format before anything is touched + if [ -n "${JAVA_HOME-}" ]; then + if [ -x "$JAVA_HOME/jre/sh/java" ]; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACCMD="$JAVA_HOME/jre/sh/javac" + else + JAVACMD="$JAVA_HOME/bin/java" + JAVACCMD="$JAVA_HOME/bin/javac" + + if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then + echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2 + echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2 + return 1 + fi + fi + else + JAVACMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v java + )" || : + JAVACCMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v javac + )" || : + + if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then + echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2 + return 1 + fi + fi +} + +# hash string like Java String::hashCode +hash_string() { + str="${1:-}" h=0 + while [ -n "$str" ]; do + char="${str%"${str#?}"}" + h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296)) + str="${str#?}" + done + printf %x\\n $h +} + +verbose() { :; } +[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; } + +die() { + printf %s\\n "$1" >&2 + exit 1 +} + +trim() { + # MWRAPPER-139: + # Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds. + # Needed for removing poorly interpreted newline sequences when running in more + # exotic environments such as mingw bash on Windows. 
+ printf "%s" "${1}" | tr -d '[:space:]' +} + +# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties +while IFS="=" read -r key value; do + case "${key-}" in + distributionUrl) distributionUrl=$(trim "${value-}") ;; + distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; + esac +done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties" +[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties" + +case "${distributionUrl##*/}" in +maven-mvnd-*bin.*) + MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ + case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in + *AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;; + :Darwin*x86_64) distributionPlatform=darwin-amd64 ;; + :Darwin*arm64) distributionPlatform=darwin-aarch64 ;; + :Linux*x86_64*) distributionPlatform=linux-amd64 ;; + *) + echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2 + distributionPlatform=linux-amd64 + ;; + esac + distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" + ;; +maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; +*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; +esac + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}" +distributionUrlName="${distributionUrl##*/}" +distributionUrlNameMain="${distributionUrlName%.*}" +distributionUrlNameMain="${distributionUrlNameMain%-bin}" +MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}" +MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")" + +exec_maven() { + unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || : + exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD" +} + +if [ -d "$MAVEN_HOME" ]; then + verbose "found existing MAVEN_HOME at $MAVEN_HOME" + exec_maven "$@" +fi + +case "${distributionUrl-}" in +*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;; +*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;; +esac + +# prepare tmp dir +if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then + clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; } + trap clean HUP INT TERM EXIT +else + die "cannot create temp dir" +fi + +mkdir -p -- "${MAVEN_HOME%/*}" + +# Download and Install Apache Maven +verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +verbose "Downloading from: $distributionUrl" +verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +# select .zip or .tar.gz +if ! command -v unzip >/dev/null; then + distributionUrl="${distributionUrl%.zip}.tar.gz" + distributionUrlName="${distributionUrl##*/}" +fi + +# verbose opt +__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR='' +[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v + +# normalize http auth +case "${MVNW_PASSWORD:+has-password}" in +'') MVNW_USERNAME='' MVNW_PASSWORD='' ;; +has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;; +esac + +if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then + verbose "Found wget ... 
using wget" + wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl" +elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then + verbose "Found curl ... using curl" + curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl" +elif set_java_home; then + verbose "Falling back to use Java to download" + javaSource="$TMP_DOWNLOAD_DIR/Downloader.java" + targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName" + cat >"$javaSource" <<-END + public class Downloader extends java.net.Authenticator + { + protected java.net.PasswordAuthentication getPasswordAuthentication() + { + return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() ); + } + public static void main( String[] args ) throws Exception + { + setDefault( new Downloader() ); + java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() ); + } + } + END + # For Cygwin/MinGW, switch paths to Windows format before running javac and java + verbose " - Compiling Downloader.java ..." + "$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java" + verbose " - Running Downloader.java ..." + "$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")" +fi + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +if [ -n "${distributionSha256Sum-}" ]; then + distributionSha256Result=false + if [ "$MVN_CMD" = mvnd.sh ]; then + echo "Checksum validation is not supported for maven-mvnd." >&2 + echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + elif command -v sha256sum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + elif command -v shasum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2 + echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + fi + if [ $distributionSha256Result = false ]; then + echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2 + echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
>&2 + exit 1 + fi +fi + +# unzip and move +if command -v unzip >/dev/null; then + unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip" +else + tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" +fi +printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url" +mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" + +clean || : +exec_maven "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100644 index 0000000..249bdf3 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,149 @@ +<# : batch portion +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Apache Maven Wrapper startup batch script, version 3.3.2 +@REM +@REM Optional ENV vars +@REM MVNW_REPOURL - repo url base for downloading maven distribution +@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output +@REM ---------------------------------------------------------------------------- + +@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0) +@SET __MVNW_CMD__= +@SET __MVNW_ERROR__= +@SET __MVNW_PSMODULEP_SAVE=%PSModulePath% +@SET PSModulePath= +@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @( + IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B) +) +@SET PSModulePath=%__MVNW_PSMODULEP_SAVE% +@SET __MVNW_PSMODULEP_SAVE= +@SET __MVNW_ARG0_NAME__= +@SET MVNW_USERNAME= +@SET MVNW_PASSWORD= +@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*) +@echo Cannot start maven from wrapper >&2 && exit /b 1 +@GOTO :EOF +: end batch / begin powershell #> + +$ErrorActionPreference = "Stop" +if ($env:MVNW_VERBOSE -eq "true") { + $VerbosePreference = "Continue" +} + +# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties +$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl +if (!$distributionUrl) { + Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" +} + +switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) 
{ + "maven-mvnd-*" { + $USE_MVND = $true + $distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip" + $MVN_CMD = "mvnd.cmd" + break + } + default { + $USE_MVND = $false + $MVN_CMD = $script -replace '^mvnw','mvn' + break + } +} + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +if ($env:MVNW_REPOURL) { + $MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" } + $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')" +} +$distributionUrlName = $distributionUrl -replace '^.*/','' +$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' +$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain" +if ($env:MAVEN_USER_HOME) { + $MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain" +} +$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join '' +$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" + +if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { + Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME" + Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" + exit $? +} + +if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) { + Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl" +} + +# prepare tmp dir +$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile +$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir" +$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null +trap { + if ($TMP_DOWNLOAD_DIR.Exists) { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } + } +} + +New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null + +# Download and Install Apache Maven +Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +Write-Verbose "Downloading from: $distributionUrl" +Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +$webclient = New-Object System.Net.WebClient +if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) { + $webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD) +} +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum +if ($distributionSha256Sum) { + if ($USE_MVND) { + Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." + } + Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash + if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) { + Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
+ } +} + +# unzip and move +Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null +Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null +try { + Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null +} catch { + if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) { + Write-Error "fail to move MAVEN_HOME" + } +} finally { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } +} + +Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..cec07f3 --- /dev/null +++ b/pom.xml @@ -0,0 +1,174 @@ + + + 4.0.0 + + com.appsflyer + aws-sdk-java-opentelemetry-metrics + 1.0.0-SNAPSHOT + AWS SDK Java OpenTelemetry Metrics + OpenTelemetry Metric Publisher for AWS SDK for Java + https://github.com/AppsFlyer/aws-sdk-java-opentelemetry-metrics + + + + MIT License + https://opensource.org/licenses/MIT + + + + + scm:git:https://github.com/AppsFlyer/aws-sdk-java-opentelemetry-metrics.git + scm:git:git@github.com:AppsFlyer/aws-sdk-java-opentelemetry-metrics.git + + https://github.com/AppsFlyer/aws-sdk-java-opentelemetry-metrics + + + + 1.8 + 1.8 + UTF-8 + 1.24.0 + 2.27.23 + 2.0.7 + 5.10.3 + 5.5.0 + + + + + + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} + + + + + software.amazon.awssdk + metrics-spi + ${software.amazon.awssdk.version} + + + + + org.slf4j + slf4j-api + ${slf4j-api.version} + + + + + software.amazon.awssdk + sdk-core + ${software.amazon.awssdk.version} + + + + + org.junit.jupiter + junit-jupiter + ${junit-jupiter.version} + test + + + + + io.opentelemetry + opentelemetry-sdk-testing + ${opentelemetry.version} + test + + + + + io.opentelemetry + opentelemetry-sdk-metrics + ${opentelemetry.version} + test + + + + + org.mockito + mockito-core + ${mockito-core.version} + test + + + + + + + + + maven-compiler-plugin + 3.11.0 + + ${maven.compiler.source} + ${maven.compiler.target} + + + + + + + + + github + GitHub Packages + https://maven.pkg.github.com/appsflyer/aws-sdk-java-opentelemetry-metrics + + + github + GitHub Packages + https://maven.pkg.github.com/appsflyer/aws-sdk-java-opentelemetry-metrics + + + + + + release + + + performRelease + true + + + + + + + org.apache.maven.plugins + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.5.0 + + + attach-javadocs + + jar + + + + + + + + + diff --git a/src/main/java/com/appsflyer/otelawsmetrics/DoubleHistogramStrategy.java b/src/main/java/com/appsflyer/otelawsmetrics/DoubleHistogramStrategy.java new file mode 100644 index 0000000..214dcae --- /dev/null +++ b/src/main/java/com/appsflyer/otelawsmetrics/DoubleHistogramStrategy.java @@ -0,0 +1,29 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.Meter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.metrics.MetricRecord; + +public class DoubleHistogramStrategy implements MetricStrategy { + private static final Logger log = LoggerFactory.getLogger(DoubleHistogramStrategy.class); + private final DoubleHistogram histogram; + + public DoubleHistogramStrategy(Meter meter, String metricName, String description) { + this.histogram = meter.histogramBuilder(metricName) + 
.setDescription(description) + .build(); + } + + @Override + public void record(MetricRecord metricRecord, Attributes attributes) { + if (metricRecord.value() instanceof Double) { + Double value = (Double) metricRecord.value(); + histogram.record(value, attributes); + } else { + log.warn("Invalid value type for a DoubleHistogram metric: {}", metricRecord.metric().name()); + } + } +} diff --git a/src/main/java/com/appsflyer/otelawsmetrics/DurationStrategy.java b/src/main/java/com/appsflyer/otelawsmetrics/DurationStrategy.java new file mode 100644 index 0000000..6b9b614 --- /dev/null +++ b/src/main/java/com/appsflyer/otelawsmetrics/DurationStrategy.java @@ -0,0 +1,33 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.Meter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.metrics.MetricRecord; + +import java.time.Duration; + +public class DurationStrategy implements MetricStrategy { + private static final Logger log = LoggerFactory.getLogger(DurationStrategy.class); + private final LongHistogram histogram; + + public DurationStrategy(Meter meter, String metricName, String description) { + this.histogram = meter.histogramBuilder(metricName) + .setDescription(description) + .setUnit("ns") + .ofLongs() + .build(); + } + + @Override + public void record(MetricRecord metricRecord, Attributes attributes) { + if (metricRecord.value() instanceof Duration) { + Duration duration = (Duration) metricRecord.value(); + histogram.record(duration.toNanos(), attributes); + } else { + log.warn("Invalid value type for duration metric: {}", metricRecord.metric().name()); + } + } +} diff --git a/src/main/java/com/appsflyer/otelawsmetrics/LongHistogramStrategy.java b/src/main/java/com/appsflyer/otelawsmetrics/LongHistogramStrategy.java new file mode 100644 index 0000000..5fb12dc --- /dev/null +++ b/src/main/java/com/appsflyer/otelawsmetrics/LongHistogramStrategy.java @@ -0,0 +1,30 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.Meter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.metrics.MetricRecord; + +public class LongHistogramStrategy implements MetricStrategy { + private static final Logger log = LoggerFactory.getLogger(LongHistogramStrategy.class); + private final LongHistogram histogram; + + public LongHistogramStrategy(Meter meter, String metricName, String description) { + this.histogram = meter.histogramBuilder(metricName) + .setDescription(description) + .ofLongs() + .build(); + } + + @Override + public void record(MetricRecord metricRecord, Attributes attributes) { + if (metricRecord.value() instanceof Number) { + Number value = (Number) metricRecord.value(); + histogram.record(value.longValue(), attributes); + } else { + log.warn("Invalid value type for a LongHistogram metric: {}", metricRecord.metric().name()); + } + } +} diff --git a/src/main/java/com/appsflyer/otelawsmetrics/MetricStrategy.java b/src/main/java/com/appsflyer/otelawsmetrics/MetricStrategy.java new file mode 100644 index 0000000..583d620 --- /dev/null +++ b/src/main/java/com/appsflyer/otelawsmetrics/MetricStrategy.java @@ -0,0 +1,9 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.common.Attributes; +import software.amazon.awssdk.metrics.MetricRecord; + 
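+/** Strategy for recording a single AWS SDK {@link MetricRecord} as an OpenTelemetry measurement with a pre-built set of {@link Attributes}. */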
+@FunctionalInterface +public interface MetricStrategy { + void record(MetricRecord metricRecord, Attributes attributes); +} diff --git a/src/main/java/com/appsflyer/otelawsmetrics/MetricStrategyWithoutErrors.java b/src/main/java/com/appsflyer/otelawsmetrics/MetricStrategyWithoutErrors.java new file mode 100644 index 0000000..3c6f440 --- /dev/null +++ b/src/main/java/com/appsflyer/otelawsmetrics/MetricStrategyWithoutErrors.java @@ -0,0 +1,35 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.common.Attributes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.metrics.MetricRecord; + +/** + * A {@link MetricStrategy} that delegates to another {@link MetricStrategy} and catches any exceptions that occur + * during the delegation. If an exception occurs, it logs a warning and continues. + */ +public class MetricStrategyWithoutErrors implements MetricStrategy { + private static final Logger log = LoggerFactory.getLogger(MetricStrategyWithoutErrors.class); + + private final MetricStrategy delegate; + + public MetricStrategyWithoutErrors(MetricStrategy delegate) { + this.delegate = delegate; + } + + @Override + public void record(MetricRecord metricRecord, Attributes attributes) { + if (metricRecord == null) { + log.warn("Received null metric record"); + return; + } + + try { + delegate.record(metricRecord, attributes); + } catch (Exception e) { + String metricName = metricRecord.metric() == null ? "null" : metricRecord.metric().name(); + log.warn("Failed to record metric: {}", metricName, e); + } + } +} diff --git a/src/main/java/com/appsflyer/otelawsmetrics/OtelMetricPublisher.java b/src/main/java/com/appsflyer/otelawsmetrics/OtelMetricPublisher.java new file mode 100644 index 0000000..4b2409b --- /dev/null +++ b/src/main/java/com/appsflyer/otelawsmetrics/OtelMetricPublisher.java @@ -0,0 +1,281 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.Meter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.core.metrics.CoreMetric; +import software.amazon.awssdk.http.HttpMetric; +import software.amazon.awssdk.metrics.MetricCollection; +import software.amazon.awssdk.metrics.MetricPublisher; +import software.amazon.awssdk.metrics.MetricRecord; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.RejectedExecutionException; + +/** + * A metrics reporter that reports AWS SDK metrics to OpenTelemetry. + * The metric names, descriptions, and units are defined based on AWS SDK Metrics List. 
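+ * <p>Collections are published asynchronously on the supplied {@link Executor} (defaulting to {@code ForkJoinPool.commonPool()}); the per-request, per-attempt, and HTTP-level child collections are each recorded with their own attribute sets.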
+ */ +public class OtelMetricPublisher implements MetricPublisher { + private static final Logger log = LoggerFactory.getLogger(OtelMetricPublisher.class); + private static final String DEFAULT_METRIC_PREFIX = "aws.sdk"; + + private final Map<String, Map<Boolean, Map<Integer, Attributes>>> perRequestAttributesCache = new ConcurrentHashMap<>(); + private final Map<Attributes, Map<String, Attributes>> perAttemptAttributesCache = new ConcurrentHashMap<>(); + private final Map<Attributes, Map<Integer, Attributes>> perHttpAttributesCache = new ConcurrentHashMap<>(); + + private final Executor executor; + private final String metricPrefix; + private final Map<String, MetricStrategy> perRequestMetrics; + private final Map<String, MetricStrategy> perAttemptMetrics; + private final Map<String, MetricStrategy> httpMetrics; + + public OtelMetricPublisher(OpenTelemetry openTelemetry) { + this(openTelemetry, DEFAULT_METRIC_PREFIX); + } + + public OtelMetricPublisher(OpenTelemetry openTelemetry, String metricPrefix) { + this(openTelemetry, metricPrefix, ForkJoinPool.commonPool()); + } + + public OtelMetricPublisher(OpenTelemetry openTelemetry, String metricPrefix, Executor executor) { + this.metricPrefix = metricPrefix + "."; + this.executor = executor; + + Meter meter = openTelemetry.getMeter("aws.sdk"); + + perRequestMetrics = initializePerRequestStrategies(meter); + perAttemptMetrics = initializeCoreStrategies(meter); + httpMetrics = initializeHttpStrategies(meter); + } + + @Override + public void publish(MetricCollection metricCollection) { + try { + executor.execute(() -> publishInternal(metricCollection)); + } catch (RejectedExecutionException ex) { + log.warn("Some AWS SDK client-side metrics have been dropped because an internal executor did not accept the task.", ex); + } + } + + @Override + public void close() { + // This publisher does not allocate any resources that need to be cleaned up. + } + + private Map<String, MetricStrategy> initializePerRequestStrategies(Meter meter) { + Map<String, MetricStrategy> strategyMap = new HashMap<>(); + strategyMap.put(CoreMetric.API_CALL_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "api_call_duration", + "The total time taken to finish a request (inclusive of all retries)"))); + strategyMap.put(CoreMetric.CREDENTIALS_FETCH_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "credentials_fetch_duration", + "The time taken to fetch AWS signing credentials for the request"))); + strategyMap.put(CoreMetric.ENDPOINT_RESOLVE_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "endpoint_resolve_duration", + "The duration of time it took to resolve the endpoint used for the API call"))); + strategyMap.put(CoreMetric.MARSHALLING_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "marshalling_duration", + "The time it takes to marshall an SDK request to an HTTP request"))); + strategyMap.put(CoreMetric.TOKEN_FETCH_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "token_fetch_duration", + "The time taken to fetch token signing credentials for the request"))); + return strategyMap; + } + + private Map<String, MetricStrategy> initializeCoreStrategies(Meter meter) { + + Map<String, MetricStrategy> strategyMap = new HashMap<>(); + strategyMap.put(CoreMetric.BACKOFF_DELAY_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "backoff_delay_duration", + "The duration of time the SDK waited before this API call attempt"))); + strategyMap.put(CoreMetric.READ_THROUGHPUT.name(), new MetricStrategyWithoutErrors(new DoubleHistogramStrategy(meter, + metricPrefix + "read_throughput", + "The read
throughput of the client in bytes/second"))); + strategyMap.put(CoreMetric.SERVICE_CALL_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "service_call_duration", + "The time it takes to connect to the service, send the request, and receive the HTTP status code and header from the response"))); + strategyMap.put(CoreMetric.SIGNING_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "signing_duration", + "The time it takes to sign the HTTP request"))); + strategyMap.put(CoreMetric.TIME_TO_FIRST_BYTE.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "time_to_first_byte", + "Elapsed time from sending the HTTP request (including acquiring a connection) to receiving the first byte of the headers in the response"))); + strategyMap.put(CoreMetric.TIME_TO_LAST_BYTE.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "time_to_last_byte", + "Elapsed time from sending the HTTP request (including acquiring a connection) to receiving the last byte of the response"))); + strategyMap.put(CoreMetric.UNMARSHALLING_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "unmarshalling_duration", + "The time it takes to unmarshall an HTTP response to an SDK response"))); + return strategyMap; + } + + private Map<String, MetricStrategy> initializeHttpStrategies(Meter meter) { + Map<String, MetricStrategy> strategyMap = new HashMap<>(); + strategyMap.put(HttpMetric.AVAILABLE_CONCURRENCY.name(), new MetricStrategyWithoutErrors(new LongHistogramStrategy(meter, + metricPrefix + "available_concurrency", + "The number of remaining concurrent requests that can be supported by the HTTP client without needing to establish another connection"))); + strategyMap.put(HttpMetric.CONCURRENCY_ACQUIRE_DURATION.name(), new MetricStrategyWithoutErrors(new DurationStrategy(meter, + metricPrefix + "concurrency_acquire_duration", + "The time taken to acquire a channel from the connection pool"))); + strategyMap.put(HttpMetric.LEASED_CONCURRENCY.name(), new MetricStrategyWithoutErrors(new LongHistogramStrategy(meter, + metricPrefix + "leased_concurrency", + "The number of requests currently being executed by the HTTP client"))); + strategyMap.put(HttpMetric.MAX_CONCURRENCY.name(), new MetricStrategyWithoutErrors(new LongHistogramStrategy(meter, + metricPrefix + "max_concurrency", + "The max number of concurrent requests supported by the HTTP client"))); + strategyMap.put(HttpMetric.PENDING_CONCURRENCY_ACQUIRES.name(), new MetricStrategyWithoutErrors(new LongHistogramStrategy(meter, + metricPrefix + "pending_concurrency_acquires", + "The number of requests that are blocked, waiting for another TCP connection or a new stream to be available from the connection pool"))); + return strategyMap; + } + + private void publishInternal(MetricCollection metricCollection) { + try { + // Start processing from the root per-request metrics + processPerRequestMetrics(metricCollection); + } catch (Exception e) { + log.error("An error occurred while publishing metrics", e); + } + } + + private void recordMetrics(Map<String, MetricRecord<?>> metricsMap, + Attributes attributes, + Map<String, MetricStrategy> metricStrategies) { + for (Map.Entry<String, MetricStrategy> entry : metricStrategies.entrySet()) { + MetricRecord<?> metricRecord = metricsMap.get(entry.getKey()); + if (metricRecord != null) { + entry.getValue().record(metricRecord, attributes); + } + } + } + + private void processPerRequestMetrics(MetricCollection requestMetrics) { + Map<String, MetricRecord<?>> metricsMap =
extractMetrics(requestMetrics); + + // Extract attributes for per-request metrics + String operationName = getStringMetricValue(metricsMap, CoreMetric.OPERATION_NAME.name()); + boolean isSuccess = getBooleanMetricValue(metricsMap, CoreMetric.API_CALL_SUCCESSFUL.name()); + int retryCount = getIntMetricValue(metricsMap, CoreMetric.RETRY_COUNT.name()); + Attributes attributes = toPerRequestAttributes(operationName, isSuccess, retryCount); + + // Report per-request metrics + recordMetrics(metricsMap, attributes, perRequestMetrics); + + // Process per-attempt metrics + for (MetricCollection attemptMetrics : requestMetrics.children()) { + processPerAttemptMetrics(attemptMetrics, attributes); + } + } + + private void processPerAttemptMetrics(MetricCollection attemptMetrics, Attributes parentAttributes) { + Map<String, MetricRecord<?>> metricsMap = extractMetrics(attemptMetrics); + + // Extract ErrorType if present + String errorType = getStringMetricValue(metricsMap, CoreMetric.ERROR_TYPE.name()); + + // Build attributes for this attempt, including the error type + Attributes attributes = toAttemptAttributes(parentAttributes, errorType); + + // Report per-attempt metrics + recordMetrics(metricsMap, attributes, perAttemptMetrics); + + // Process HTTP metrics + for (MetricCollection httpMetricsCollection : attemptMetrics.children()) { + processHttpMetrics(httpMetricsCollection, attributes); + } + } + + private void processHttpMetrics(MetricCollection httpMetricsCollection, Attributes parentAttributes) { + Map<String, MetricRecord<?>> metricsMap = extractMetrics(httpMetricsCollection); + + // Extract HTTP status code + int httpStatusCode = getIntMetricValue(metricsMap, HttpMetric.HTTP_STATUS_CODE.name()); + Attributes attributes = toHttpAttributes(parentAttributes, httpStatusCode); + + // Report HTTP metrics + recordMetrics(metricsMap, attributes, httpMetrics); + } + + private Map<String, MetricRecord<?>> extractMetrics(MetricCollection metricCollection) { + Map<String, MetricRecord<?>> metricMap = new HashMap<>(); + for (MetricRecord<?> metricRecord : metricCollection) { + metricMap.put(metricRecord.metric().name(), metricRecord); + } + return metricMap; + } + + private String getStringMetricValue(Map<String, MetricRecord<?>> metricsMap, String metricName) { + MetricRecord<?> metricRecord = metricsMap.get(metricName); + if (metricRecord != null) { + Object value = metricRecord.value(); + if (value instanceof String) { + return (String) value; + } + } + return null; + } + + @SuppressWarnings("SameParameterValue") + private boolean getBooleanMetricValue(Map<String, MetricRecord<?>> metricsMap, String metricName) { + MetricRecord<?> metricRecord = metricsMap.get(metricName); + if (metricRecord != null) { + Object value = metricRecord.value(); + if (value instanceof Boolean) { + return (Boolean) value; + } + } + return false; + } + + private int getIntMetricValue(Map<String, MetricRecord<?>> metricsMap, String metricName) { + MetricRecord<?> metricRecord = metricsMap.get(metricName); + if (metricRecord != null) { + Object value = metricRecord.value(); + if (value instanceof Number) { + return ((Number) value).intValue(); + } + } + return 0; + } + + private Attributes toPerRequestAttributes(String operationName, boolean isSuccess, int retryCount) { + String nullSafeOperationName = operationName == null ?
"null" : operationName; + return perRequestAttributesCache + .computeIfAbsent(nullSafeOperationName, op -> new ConcurrentHashMap<>()) + .computeIfAbsent(isSuccess, success -> new ConcurrentHashMap<>()) + .computeIfAbsent(retryCount, rc -> Attributes.builder() + .put("request_operation_name", nullSafeOperationName) + .put("request_is_success", isSuccess) + .put("request_retry_count", retryCount) + .build()); + } + + private Attributes toAttemptAttributes(Attributes parentAttributes, String errorType) { + String safeErrorType = errorType == null ? "no_error" : errorType; + return perAttemptAttributesCache + .computeIfAbsent(parentAttributes, attr -> new ConcurrentHashMap<>()) + .computeIfAbsent(safeErrorType, type -> + Attributes.builder() + .putAll(parentAttributes) + .put("attempt_error_type", type) + .build()); + } + + private Attributes toHttpAttributes(Attributes parentAttributes, int httpStatusCode) { + return perHttpAttributesCache + .computeIfAbsent(parentAttributes, attr -> new ConcurrentHashMap<>()) + .computeIfAbsent(httpStatusCode, code -> + Attributes.builder() + .putAll(parentAttributes) + .put("http_status_code", code) + .build()); + } +} + diff --git a/src/test/java/com/appsflyer/otelawsmetrics/OtelMetricPublisherTest.java b/src/test/java/com/appsflyer/otelawsmetrics/OtelMetricPublisherTest.java new file mode 100644 index 0000000..24b6a16 --- /dev/null +++ b/src/test/java/com/appsflyer/otelawsmetrics/OtelMetricPublisherTest.java @@ -0,0 +1,118 @@ +package com.appsflyer.otelawsmetrics; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.data.HistogramData; +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import software.amazon.awssdk.core.metrics.CoreMetric; +import software.amazon.awssdk.metrics.MetricCollection; +import software.amazon.awssdk.metrics.MetricPublisher; +import software.amazon.awssdk.metrics.MetricRecord; +import software.amazon.awssdk.metrics.SdkMetric; +import software.amazon.awssdk.metrics.internal.DefaultMetricCollection; +import software.amazon.awssdk.metrics.internal.DefaultMetricRecord; + +import java.time.Duration; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import static org.junit.jupiter.api.Assertions.*; + +class OtelMetricPublisherTest { + + private ExecutorService executor; + private InMemoryMetricReader metricReader; + private MetricPublisher metricPublisher; + + @BeforeEach + void setUp() { + // Create an executor for the OtelMetricPublisher + executor = Executors.newSingleThreadExecutor(); + + // Set up an InMemoryMetricReader to capture metrics + metricReader = InMemoryMetricReader.create(); + + // Set up the SdkMeterProvider with the metric reader + SdkMeterProvider sdkMeterProvider = SdkMeterProvider.builder() + .registerMetricReader(metricReader) + .build(); + + // Set up OpenTelemetry with the SdkMeterProvider + OpenTelemetrySdk openTelemetrySdk = OpenTelemetrySdk.builder() + .setMeterProvider(sdkMeterProvider) + .build(); + + GlobalOpenTelemetry.resetForTest(); + GlobalOpenTelemetry.set(openTelemetrySdk); + + // Create an instance of OtelMetricPublisher + metricPublisher = new 
OtelMetricPublisher(GlobalOpenTelemetry.get(), "aws.sdk", executor); + } + + @Test + public void testPublishMetrics() throws InterruptedException { + // Create a mock MetricCollection + MetricCollection metricCollection = createMockMetricCollection(); + + // Publish the metrics + metricPublisher.publish(metricCollection); + + // Wait for executor to process the metrics + executor.execute(() -> {}); // Submit a no-op task to ensure previous tasks are completed + Thread.sleep(500); // Wait briefly for tasks to complete + + // Retrieve the recorded metrics + List<MetricData> exportedMetrics = new ArrayList<>(metricReader.collectAllMetrics()); + + // Verify that the expected metrics are recorded + assertEquals(1, exportedMetrics.size(), "Expected one metric to be exported"); + + MetricData metricData = exportedMetrics.get(0); + assertEquals("aws.sdk.api_call_duration", metricData.getName()); + assertEquals("The total time taken to finish a request (inclusive of all retries)", metricData.getDescription()); + assertEquals("ns", metricData.getUnit()); + + // Verify the data points + HistogramData histogramData = metricData.getHistogramData(); + Collection<HistogramPointData> points = histogramData.getPoints(); + assertEquals(1, points.size(), "Expected one data point"); + + HistogramPointData point = points.iterator().next(); + assertEquals(100.0, point.getSum(), 0.001, "Expected sum to be 100.0"); + assertEquals(1, point.getCount(), "Expected count to be 1"); + assertEquals("GetItem", point.getAttributes().get(AttributeKey.stringKey("request_operation_name"))); + assertEquals(true, point.getAttributes().get(AttributeKey.booleanKey("request_is_success"))); + assertEquals(0L, point.getAttributes().get(AttributeKey.longKey("request_retry_count"))); + } + + private MetricCollection createMockMetricCollection() { + // Create a Map to hold the metrics + Map<SdkMetric<?>, List<MetricRecord<?>>> metrics = new HashMap<>(); + + // For API_CALL_DURATION + MetricRecord<Duration> apiCallDurationRecord = new DefaultMetricRecord<>(CoreMetric.API_CALL_DURATION, Duration.ofNanos(100)); + metrics.put(CoreMetric.API_CALL_DURATION, Collections.singletonList(apiCallDurationRecord)); + + // For OPERATION_NAME + MetricRecord<String> operationNameRecord = new DefaultMetricRecord<>(CoreMetric.OPERATION_NAME, "GetItem"); + metrics.put(CoreMetric.OPERATION_NAME, Collections.singletonList(operationNameRecord)); + + // For API_CALL_SUCCESSFUL + MetricRecord<Boolean> apiCallSuccessfulRecord = new DefaultMetricRecord<>(CoreMetric.API_CALL_SUCCESSFUL, true); + metrics.put(CoreMetric.API_CALL_SUCCESSFUL, Collections.singletonList(apiCallSuccessfulRecord)); + + // For RETRY_COUNT + MetricRecord<Integer> retryCountRecord = new DefaultMetricRecord<>(CoreMetric.RETRY_COUNT, 0); + metrics.put(CoreMetric.RETRY_COUNT, Collections.singletonList(retryCountRecord)); + + // Create the MetricCollection using DefaultMetricCollection + return new DefaultMetricCollection("ApiCall", metrics, Collections.emptyList()); + } +}
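The test above only exercises request-level metrics. The publisher walks exactly one level of children per stage: the root collection is treated as the API call, its children as attempts, and their children as HTTP client metrics. A minimal sketch of a nested fixture, reusing the internal `DefaultMetricCollection`/`DefaultMetricRecord` helpers and imports from the test (collection names here are illustrative; only the nesting depth matters to the publisher):

```java
// HTTP-level metrics (child of an attempt); the status code becomes an attribute, not a histogram
Map<SdkMetric<?>, List<MetricRecord<?>>> httpLevel = new HashMap<>();
httpLevel.put(HttpMetric.HTTP_STATUS_CODE,
        Collections.singletonList(new DefaultMetricRecord<>(HttpMetric.HTTP_STATUS_CODE, 200)));
MetricCollection httpCollection =
        new DefaultMetricCollection("HttpClient", httpLevel, Collections.emptyList());

// Attempt-level metrics (child of the API call)
Map<SdkMetric<?>, List<MetricRecord<?>>> attemptLevel = new HashMap<>();
attemptLevel.put(CoreMetric.SERVICE_CALL_DURATION,
        Collections.singletonList(new DefaultMetricRecord<>(CoreMetric.SERVICE_CALL_DURATION, Duration.ofMillis(42))));
MetricCollection attemptCollection =
        new DefaultMetricCollection("ApiCallAttempt", attemptLevel, Collections.singletonList(httpCollection));

// Root request-level collection
Map<SdkMetric<?>, List<MetricRecord<?>>> requestLevel = new HashMap<>();
requestLevel.put(CoreMetric.OPERATION_NAME,
        Collections.singletonList(new DefaultMetricRecord<>(CoreMetric.OPERATION_NAME, "GetItem")));
MetricCollection requestCollection =
        new DefaultMetricCollection("ApiCall", requestLevel, Collections.singletonList(attemptCollection));

// Publishing the root collection (with a publisher instance like the one in the test) records
// aws.sdk.service_call_duration tagged with the request attributes plus the attempt's error type,
// and uses the HTTP status code as an attribute for any HTTP client metrics present.
metricPublisher.publish(requestCollection);
```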