diff --git a/.circleci/collect_reports.sh b/.circleci/collect_reports.sh
index 9e085003c62..ce66a61358e 100755
--- a/.circleci/collect_reports.sh
+++ b/.circleci/collect_reports.sh
@@ -35,12 +35,12 @@ done
mkdir -p $REPORTS_DIR >/dev/null 2>&1
-cp /tmp/hs_err_pid*.log $REPORTS_DIR || true
-cp /tmp/java_pid*.hprof $REPORTS_DIR || true
-cp /tmp/javacore.* $REPORTS_DIR || true
-cp /tmp/*.trc $REPORTS_DIR || true
-cp /tmp/*.dmp $REPORTS_DIR || true
-cp /tmp/dd-profiler/*.jfr $REPORTS_DIR || true
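+# Best-effort copies: each glob may match nothing, so silence stderr and ignore failures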
+cp /tmp/hs_err_pid*.log $REPORTS_DIR 2>/dev/null || true
+cp /tmp/java_pid*.hprof $REPORTS_DIR 2>/dev/null || true
+cp /tmp/javacore.* $REPORTS_DIR 2>/dev/null || true
+cp /tmp/*.trc $REPORTS_DIR 2>/dev/null || true
+cp /tmp/*.dmp $REPORTS_DIR 2>/dev/null || true
+cp /tmp/dd-profiler/*.jfr $REPORTS_DIR 2>/dev/null || true
function process_reports () {
project_to_save=$1
@@ -59,9 +59,9 @@ function process_reports () {
else
echo "copying reports for $project_to_save"
mkdir -p $report_path
- cp -r workspace/$project_to_save/build/reports/* $report_path/ || true
- cp workspace/$project_to_save/build/hs_err_pid*.log $report_path/ || true
- cp workspace/$project_to_save/build/javacore*.txt $report_path/ || true
+ cp -r workspace/$project_to_save/build/reports/* $report_path/ 2>/dev/null || true
+ cp workspace/$project_to_save/build/hs_err_pid*.log $report_path/ 2>/dev/null || true
+ cp workspace/$project_to_save/build/javacore*.txt $report_path/ 2>/dev/null || true
fi
}
@@ -73,4 +73,4 @@ for report_path in workspace/**/build/reports; do
process_reports $report_path
done
-tar -cvzf reports.tar $REPORTS_DIR
+tar -czf reports.tar $REPORTS_DIR
diff --git a/.circleci/config.continue.yml.j2 b/.circleci/config.continue.yml.j2
index a3faffeafd0..4e237b8dff4 100644
--- a/.circleci/config.continue.yml.j2
+++ b/.circleci/config.continue.yml.j2
@@ -36,7 +36,7 @@ instrumentation_modules: &instrumentation_modules "dd-java-agent/instrumentation
debugger_modules: &debugger_modules "dd-java-agent/agent-debugger|dd-java-agent/agent-bootstrap|dd-java-agent/agent-builder|internal-api|communication|dd-trace-core"
profiling_modules: &profiling_modules "dd-java-agent/agent-profiling"
-default_system_tests_commit: &default_system_tests_commit 121787cbd6b3e5bc2840a0d5df17ecfb45566837
+default_system_tests_commit: &default_system_tests_commit b0b2e1f212f8c483b52aa3adc6ffd4132b1ba9b8
parameters:
nightly:
@@ -744,79 +744,6 @@ jobs:
path: ./reports
- display_memory_usage
- muzzle-dep-report:
- <<: *defaults
- resource_class: medium
- steps:
- - setup_code
- - skip_unless_matching_files_changed:
- pattern: "dd-java-agent/instrumentation"
- - restore_dependency_cache:
- cacheType: inst
- - restore_build_cache:
- cacheType: inst
- - run:
- name: Generate muzzle dep report
- command: >-
- SKIP_BUILDSCAN="true"
- GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
- ./gradlew generateMuzzleReport muzzleInstrumentationReport
- - run:
- name: Collect Reports
- command: .circleci/collect_muzzle_deps.sh
- - store_artifacts:
- path: ./reports
-
- muzzle:
- <<: *defaults
- resource_class: medium+
- parallelism: 4
- steps:
- - setup_code
-
- - skip_unless_matching_files_changed:
- pattern: "dd-java-agent/instrumentation"
-
- # We are not running with a separate cache of all muzzle artifacts here because it gets very big and
- # ends up taking more time restoring/saving than the actual increase in time it takes just
- # downloading the artifacts each time.
- #
- # Let's at least restore the build cache to have something to start from.
- - restore_dependency_cache:
- cacheType: inst
- - restore_build_cache:
- cacheType: inst
-
- - run:
- name: Gather muzzle tasks
- command: >-
- SKIP_BUILDSCAN="true"
- GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
- ./gradlew writeMuzzleTasksToFile
- << pipeline.parameters.gradle_flags >>
- --max-workers=3
-
- - run:
- name: Verify Muzzle
- command: >-
- SKIP_BUILDSCAN="true"
- GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx3G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
- ./gradlew `circleci tests split --split-by=timings workspace/build/muzzleTasks | xargs`
- << pipeline.parameters.gradle_flags >>
- --max-workers=4
-
- - run:
- name: Collect Reports
- when: on_fail
- command: .circleci/collect_reports.sh
-
- - store_artifacts:
- path: ./reports
-
- - store_test_results:
- path: workspace/build/muzzle-test-results
-
- - display_memory_usage
system-tests:
machine:
@@ -1337,6 +1264,24 @@ build_test_jobs: &build_test_jobs
cacheType: smoke
testJvm: "semeru8"
+ - tests:
+ requires:
+ - ok_to_test
+ name: test_graalvm17_smoke
+ gradleTarget: "stageMainDist :dd-smoke-test:quarkus-native:test"
+ stage: smoke
+ cacheType: smoke
+ testJvm: "graalvm17"
+
+ - tests:
+ requires:
+ - ok_to_test
+ name: test_graalvm21_smoke
+ gradleTarget: "stageMainDist :dd-smoke-test:quarkus-native:test"
+ stage: smoke
+ cacheType: smoke
+ testJvm: "graalvm21"
+
- tests:
requires:
- ok_to_test
@@ -1427,20 +1372,6 @@ build_test_jobs: &build_test_jobs
requires:
- ok_to_test
- - muzzle:
- requires:
- - ok_to_test
- filters:
- branches:
- ignore:
- - master
- - project/*
- - release/*
-
- - muzzle-dep-report:
- requires:
- - ok_to_test
-
- system-tests:
requires:
- ok_to_test
@@ -1488,7 +1419,6 @@ build_test_jobs: &build_test_jobs
- "test_{{ jdk }}"
{% endfor %}
- test_inst_latest
- - muzzle
- profiling
- debugger
- system-tests
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index c272b36b581..f914fd12ade 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -8,7 +8,13 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
- interval: "monthly"
+ interval: "weekly"
+ labels:
+ - "comp: tooling"
+ - "tag: dependencies"
+ - "tag: no release notes"
+ commit-message:
+ prefix: "chore(ci): "
groups:
gh-actions-packages:
patterns:
diff --git a/.github/workflows/README.md b/.github/workflows/README.md
index 7273a1d8435..3148b7a5a58 100644
--- a/.github/workflows/README.md
+++ b/.github/workflows/README.md
@@ -115,6 +115,16 @@ _Action:_
_Notes:_ Results are sent on both production and staging environments.
+### check-ci-pipelines [🔗](check-ci-pipelines.yml)
+
+_Trigger:_ When opening or updating a PR.
+
+_Action:_ This action checks all other continuous integration jobs (GitHub Actions, GitLab, CircleCI) and fails if any of them fails.
+The purpose of this job is to be a required check for PR merges, enforcing the Green CI Policy.
+It has an `ignored-name-patterns` parameter to exclude jobs that are temporarily failing.
+
+_Recovery:_ Manually trigger the action on the desired branch.
+
### comment-on-submodule-update [🔗](comment-on-submodule-update.yaml)
_Trigger:_ When creating a PR commits to `master` or a `release/*` branch with a Git Submodule update.
@@ -137,7 +147,6 @@ _Action:_ Build the Java Client Library and runs [the system tests](https://gith
_Recovery:_ Manually trigger the action on the desired branch.
-
## Maintenance
GitHub actions should be part of the [repository allowed actions to run](https://github.com/DataDog/dd-trace-java/settings/actions).
diff --git a/.github/workflows/check-ci-pipelines.yml b/.github/workflows/check-ci-pipelines.yml
new file mode 100644
index 00000000000..2a72ca48fc1
--- /dev/null
+++ b/.github/workflows/check-ci-pipelines.yml
@@ -0,0 +1,35 @@
+name: Check Pull Request CI Status
+
+on:
+ pull_request:
+ types:
+ - opened
+ - synchronize
+ - reopened
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+permissions:
+ checks: read
+ statuses: read
+
+jobs:
+ check-ci-pipelines:
+ name: Check CI Pipelines
+ runs-on: ubuntu-latest
+ steps:
+ - name: Run Ensure CI Success
+ uses: DataDog/ensure-ci-success@f40e6ffd8e60280d478b9b92209aaa30d3d56895
+ with:
+ initial-delay-seconds: "1000"
+ max-retries: "60"
+ ignored-name-patterns: |
+ dd-gitlab/default-pipeline
+ dd-gitlab/check_inst 4/4
+
+# Ignored jobs:
+#
+# * dd-gitlab/default-pipeline => success rate of 70% (needs an owner)
+# * dd-gitlab/check_inst 4/4 => success rate of 78% (needs an owner)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6a0ec785ec6..a7c1e528a11 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,9 +27,57 @@ variables:
GRADLE_PLUGIN_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/gradle-plugin-portal-proxy/"
JAVA_BUILD_IMAGE_VERSION: "v25.01"
REPO_NOTIFICATION_CHANNEL: "#apm-java-escalations"
+ DEFAULT_TEST_JVMS: /^(8|11|17|21)$/
PROFILE_TESTS:
description: "Enable profiling of tests"
value: "false"
+ NON_DEFAULT_JVMS:
+ description: "Enable tests on JVMs that are not the default"
+ value: "false"
+ RUN_FLAKY_TESTS:
+ description: "Enable flaky tests"
+ value: "false"
+
+.test_matrix: &test_matrix
+ - testJvm: &test_jvms
+ - "8"
+ - "11"
+ - "17"
+ - "21"
+ - "semeru11"
+ - "oracle8"
+ - "ubuntu17"
+ - "zulu8"
+ - "semeru8"
+ - "ibm8"
+ - "zulu11"
+ - "semeru17"
+
+# Gitlab doesn't support "parallel" and "parallel:matrix" at the same time
+# These blocks emulate "parallel" by including it in the matrix
+.test_matrix_2: &test_matrix_2
+ - testJvm: *test_jvms
+ CI_SPLIT: ["1/2", "2/2"]
+
+.test_matrix_4: &test_matrix_4
+ - testJvm: *test_jvms
+ CI_SPLIT: ["1/4", "2/4", "3/4", "4/4"]
+
+.test_matrix_6: &test_matrix_6
+ - testJvm: *test_jvms
+ CI_SPLIT: ["1/6", "2/6", "3/6", "4/6", "5/6", "6/6"]
+
+.test_matrix_8: &test_matrix_8
+ - testJvm: *test_jvms
+ CI_SPLIT: ["1/8", "2/8", "3/8", "4/8", "5/8", "6/8", "7/8", "8/8"]
+
+.test_matrix_12: &test_matrix_12
+ - testJvm: *test_jvms
+ CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ]
+
+.master_only: &master_only
+ - if: $CI_COMMIT_BRANCH == "master"
+ when: on_success
default:
tags: [ "arch:amd64" ]
@@ -45,52 +93,64 @@ default:
# CI_NODE_INDEX and CI_NODE_TOTAL are 1-indexed and not always set. These steps normalize the numbers for jobs
.normalize_node_index: &normalize_node_index
- - echo "CI_NODE_TOTAL=$CI_NODE_TOTAL , CI_NODE_INDEX=$CI_NODE_INDEX"
+ - if [ "$CI_NO_SPLIT" == "true" ] ; then CI_NODE_INDEX=1; CI_NODE_TOTAL=1; fi # A job uses parallel but doesn't intend to split by index
+ - if [ -n "$CI_SPLIT" ]; then CI_NODE_INDEX="${CI_SPLIT%%/*}"; CI_NODE_TOTAL="${CI_SPLIT##*/}"; fi
+ - echo "CI_NODE_TOTAL=${CI_NODE_TOTAL}, CI_NODE_INDEX=$CI_NODE_INDEX"
- export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1}
- ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((ONE_INDEXED_NODE_INDEX - 1))
- - echo "NORMALIZED_NODE_TOTAL=$NORMALIZED_NODE_TOTAL , NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX"
+ - echo "NORMALIZED_NODE_TOTAL=${NORMALIZED_NODE_TOTAL}, NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX"
.gradle_build: &gradle_build
image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base
stage: build
variables:
- GRADLE_OPTS: "-Dorg.gradle.jvmargs='-Xmx2560M -Xms2560M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
MAVEN_OPTS: "-Xms64M -Xmx512M"
GRADLE_WORKERS: 2
+ GRADLE_MEM: 2560M
KUBERNETES_CPU_REQUEST: 8
- KUBERNETES_MEMORY_REQUEST: 6Gi
+ KUBERNETES_MEMORY_REQUEST: 8Gi
+ KUBERNETES_MEMORY_LIMIT: 8Gi
+ CACHE_TYPE: lib #default
+ RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE: 4 # Runtime.getRuntime().availableProcessors() returns incorrect or very high values in Kubernetes
cache:
- - key: '$CI_SERVER_VERSION-v2' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months
+ - key: '$CI_SERVER_VERSION-$CACHE_TYPE' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months
paths:
# Cached dependencies and wrappers for gradle
- .gradle/wrapper
- .gradle/caches
- .gradle/notifications
policy: $DEPENDENCY_CACHE_POLICY
- - key: $CI_PIPELINE_ID-$BUILD_CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type
+      fallback_keys: # Use fallback keys because not all cache types are populated. See note under: populate_dep_cache
+ - '$CI_SERVER_VERSION-base'
+ - '$CI_SERVER_VERSION-lib'
+ - key: $CI_PIPELINE_ID-$CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type
paths:
- .gradle/caches/$GRADLE_VERSION
- .gradle/$GRADLE_VERSION/executionHistory
- workspace
policy: $BUILD_CACHE_POLICY
before_script:
+ - source .gitlab/gitlab-utils.sh
- export GRADLE_USER_HOME=`pwd`/.gradle
+ - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx$GRADLE_MEM -Xms$GRADLE_MEM -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
- export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS -PmavenRepositoryProxy=$MAVEN_REPOSITORY_PROXY -PgradlePluginProxy=$GRADLE_PLUGIN_PROXY"
- *normalize_node_index
# for weird reasons, gradle will always "chmod 700" the .gradle folder
# with Gitlab caching, .gradle is always owned by root and thus gradle's chmod invocation fails
# This dance is a hack to have .gradle owned by the Gitlab runner user
+ - gitlab_section_start "gradle-dance" "Fix .gradle directory permissions"
- mkdir -p .gradle
- cp -r .gradle .gradle-copy
- rm -rf .gradle
- mv .gradle-copy .gradle
- ls -la
+ - gitlab_section_end "gradle-dance"
build:
extends: .gradle_build
variables:
BUILD_CACHE_POLICY: push
- BUILD_CACHE_TYPE: lib
+ CACHE_TYPE: lib
DEPENDENCY_CACHE_POLICY: pull
script:
- if [ $CI_PIPELINE_SOURCE == "schedule" ] ; then ./gradlew resolveAndLockAll --write-locks; fi
@@ -109,16 +169,58 @@ build:
reports:
dotenv: build.env
-build_and_populate_dep_cache:
- extends: build
+build_tests:
+ extends: .gradle_build
variables:
BUILD_CACHE_POLICY: push
+ DEPENDENCY_CACHE_POLICY: pull
+ GRADLE_MEM: 4G
+ GRADLE_WORKERS: 3
+ KUBERNETES_MEMORY_REQUEST: 18Gi
+ KUBERNETES_MEMORY_LIMIT: 18Gi
+ parallel:
+ matrix:
+ - GRADLE_TARGET: ":baseTest"
+ CACHE_TYPE: "base"
+ - GRADLE_TARGET: ":profilingTest"
+ CACHE_TYPE: "profiling"
+ - GRADLE_TARGET: ":instrumentationTest"
+ CACHE_TYPE: "inst"
+ - GRADLE_TARGET: ":instrumentationLatestDepTest"
+ CACHE_TYPE: "latestdep"
+ - GRADLE_TARGET: ":smokeTest"
+ CACHE_TYPE: "smoke"
+        MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" # FIXME: The :smokeTest build fails unless mvn debug logging is on
+
+ script:
+ - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS
+
+populate_dep_cache:
+ extends: build_tests
+ variables:
+ BUILD_CACHE_POLICY: pull
DEPENDENCY_CACHE_POLICY: push
rules:
- if: '$POPULATE_CACHE'
when: on_success
- when: manual
allow_failure: true
+ parallel:
+ matrix:
+ - GRADLE_TARGET: ":dd-java-agent:shadowJar :dd-trace-api:jar :dd-trace-ot:shadowJar"
+ CACHE_TYPE: "lib"
+ - GRADLE_TARGET: ":baseTest"
+ CACHE_TYPE: "base"
+ - GRADLE_TARGET: ":profilingTest"
+ CACHE_TYPE: "profiling"
+# FIXME: Gitlab doesn't support s3 based caches >5GB. Fixed in Gitlab 17.5
+# See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26921#note_2132307223
+# - GRADLE_TARGET: ":instrumentationTest"
+# CACHE_TYPE: "inst"
+# - GRADLE_TARGET: ":instrumentationLatestDepTest"
+# CACHE_TYPE: "latestdep"
+# - GRADLE_TARGET: ":smokeTest"
+# CACHE_TYPE: "smoke"
spotless:
extends: .gradle_build
@@ -134,7 +236,7 @@ test_published_artifacts:
stage: tests
needs: [ build ]
variables:
- BUILD_CACHE_TYPE: lib
+ CACHE_TYPE: lib
script:
- mvn_local_repo=$(./mvnw help:evaluate -Dexpression=settings.localRepository -q -DforceStdout)
- rm -rf "${mvn_local_repo}/com/datadoghq"
@@ -143,10 +245,13 @@ test_published_artifacts:
- export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
- ./gradlew publishToMavenLocal $GRADLE_ARGS
- cd test-published-dependencies
- - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx512M -Xms512M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
+ - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx1G -Xms1G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'"
- ./gradlew check --info $GRADLE_ARGS
after_script:
+ - source .gitlab/gitlab-utils.sh
+ - gitlab_section_start "collect-reports" "Collecting reports"
- .circleci/collect_reports.sh
+ - gitlab_section_end "collect-reports"
artifacts:
when: always
paths:
@@ -157,16 +262,28 @@ test_published_artifacts:
needs: [ build ]
stage: tests
variables:
- BUILD_CACHE_TYPE: lib
+ CACHE_TYPE: lib
script:
- ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS
after_script:
+ - source .gitlab/gitlab-utils.sh
+ - gitlab_section_start "collect-reports" "Collecting reports"
- .circleci/collect_reports.sh --destination ./check_reports --move
+ - gitlab_section_end "collect-reports"
artifacts:
when: always
paths:
- ./check_reports
- '.gradle/daemon/*/*.out.log'
+ retry:
+ max: 2
+ when:
+ - unknown_failure
+ - stuck_or_timeout_failure
+ - runner_system_failure
+ - unmet_prerequisites
+ - scheduler_failure
+ - data_integrity_failure
check_base:
extends: .check_job
@@ -197,11 +314,11 @@ check_debugger:
muzzle:
extends: .gradle_build
- needs: [ build ]
+ needs: [ build_tests ]
stage: tests
parallel: 8
variables:
- BUILD_CACHE_TYPE: lib
+ CACHE_TYPE: inst
script:
- export SKIP_BUILDSCAN="true"
- ./gradlew writeMuzzleTasksToFile $GRADLE_ARGS
@@ -209,7 +326,10 @@ muzzle:
- split --number=l/$NORMALIZED_NODE_TOTAL --suffix-length=1 --numeric-suffixes sortedMuzzleTasks muzzleSplit
- ./gradlew `cat muzzleSplit${NORMALIZED_NODE_INDEX} | xargs` $GRADLE_ARGS
after_script:
+ - source .gitlab/gitlab-utils.sh
+ - gitlab_section_start "collect-reports" "Collecting reports"
- .circleci/collect_reports.sh
+ - gitlab_section_end "collect-reports"
artifacts:
when: always
paths:
@@ -218,10 +338,10 @@ muzzle:
muzzle-dep-report:
extends: .gradle_build
- needs: [ build ]
+ needs: [ build_tests ]
stage: tests
variables:
- BUILD_CACHE_TYPE: test
+ CACHE_TYPE: inst
script:
- export SKIP_BUILDSCAN="true"
- ./gradlew generateMuzzleReport muzzleInstrumentationReport $GRADLE_ARGS
@@ -251,12 +371,28 @@ muzzle-dep-report:
.test_job:
extends: .gradle_build
image: ghcr.io/datadog/dd-trace-java-docker-build:$testJvm
- needs: [ build ]
+ tags: [ "docker-in-docker:amd64" ] # use docker-in-docker runner for testcontainers
+ needs: [ build_tests ]
stage: tests
variables:
- BUILD_CACHE_TYPE: lib
- GRADLE_PARAMS: ""
+ KUBERNETES_MEMORY_REQUEST: 16Gi
+ KUBERNETES_MEMORY_LIMIT: 16Gi
+ KUBERNETES_CPU_REQUEST: 10
+ GRADLE_WORKERS: 4
+ GRADLE_MEM: 3G
+ GRADLE_PARAMS: "-PskipFlakyTests"
CONTINUE_ON_FAILURE: "false"
+ TESTCONTAINERS_CHECKS_DISABLE: "true"
+ TESTCONTAINERS_RYUK_DISABLED: "true"
+ TESTCONTAINERS_HUB_IMAGE_NAME_PREFIX: "registry.ddbuild.io/images/mirror/"
+ JETTY_AVAILABLE_PROCESSORS: 4 # Jetty incorrectly calculates processor count in containers
+ rules:
+ - if: $testJvm =~ $DEFAULT_TEST_JVMS
+ when: on_success
+ - if: $NON_DEFAULT_JVMS == "true"
+ when: on_success
+ - if: $CI_COMMIT_BRANCH == "master"
+ when: on_success
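+    # i.e. default JVMs run on every pipeline; other JVMs run only on master or when NON_DEFAULT_JVMS == "true"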
script:
- >
if [ "$PROFILE_TESTS" == "true" ] && [ "$testJvm" != "ibm8" ] && [ "$testJvm" != "oracle8" ];
@@ -264,16 +400,18 @@ muzzle-dep-report:
export PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/${CI_JOB_NAME_SLUG}.jfr,dumponexit=true";
fi
- *prepare_test_env
- - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms2G -Xmx2G $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M"
- - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE
+ - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms$GRADLE_MEM -Xmx$GRADLE_MEM $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M"
+ - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE
after_script:
- *restore_pretest_env
- *set_datadog_api_keys
+ - source .gitlab/gitlab-utils.sh
+ - gitlab_section_start "collect-reports" "Collecting reports"
- .circleci/collect_reports.sh
- if [ "$PROFILE_TESTS" == "true" ]; then .circleci/collect_profiles.sh; fi
- .circleci/collect_results.sh
- .circleci/upload_ciapp.sh tests $testJvm
-# TODO Get APM Test Agent Trace Check Results
+ - gitlab_section_end "collect-reports"
artifacts:
when: always
paths:
@@ -281,13 +419,45 @@ muzzle-dep-report:
- ./profiles.tar
- ./results
- '.gradle/daemon/*/*.out.log'
+ reports:
+ junit: results/*.xml
+ retry:
+ max: 2
+ when:
+ - unknown_failure
+ - stuck_or_timeout_failure
+ - runner_system_failure
+ - unmet_prerequisites
+ - scheduler_failure
+ - data_integrity_failure
+
+.test_job_with_test_agent:
+ extends: .test_job
+ variables:
+ CI_USE_TEST_AGENT: "true"
+ CI_AGENT_HOST: local-agent
+ services:
+ - name: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.11.0
+ alias: local-agent
+ variables:
+ LOG_LEVEL: "DEBUG"
+ TRACE_LANGUAGE: "java"
+ DD_SUPPRESS_TRACE_PARSE_ERRORS: "true"
+ DD_POOL_TRACE_CHECK_FAILURES: "true"
+ DD_DISABLE_ERROR_RESPONSES: "true"
+ ENABLED_CHECKS: "trace_content_length,trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service,trace_dd_service"
+ script:
+ - !reference [.test_job, script]
+ - .gitlab/check_test_agent_results.sh
agent_integration_tests:
extends: .test_job
+ tags: [ "arch:amd64" ]
variables:
testJvm: "8"
CI_AGENT_HOST: local-agent
GRADLE_TARGET: "traceAgentTest"
+ CACHE_TYPE: "base"
services:
- name: datadog/agent:7.34.0
alias: local-agent
@@ -296,6 +466,129 @@ agent_integration_tests:
DD_BIND_HOST: "0.0.0.0"
DD_API_KEY: "invalid_key_but_this_is_fine"
+test_base:
+ extends: .test_job
+ variables:
+ GRADLE_TARGET: ":baseTest"
+ CACHE_TYPE: "base"
+ parallel:
+ matrix: *test_matrix_4
+ script:
+ - if [ "$testJvm" == "8" ]; then export GRADLE_PARAMS="-PskipFlakyTests -PcheckCoverage"; fi
+ - !reference [.test_job, script]
+
+test_inst:
+ extends: .test_job_with_test_agent
+ variables:
+ GRADLE_TARGET: ":instrumentationTest"
+ CACHE_TYPE: "inst"
+ parallel:
+ matrix: *test_matrix_6
+
+test_inst_latest:
+ extends: .test_job_with_test_agent
+ variables:
+ GRADLE_TARGET: ":instrumentationLatestDepTest"
+ CACHE_TYPE: "latestDep"
+ parallel:
+ matrix:
+ - testJvm: ["8", "17", "21" ]
+ # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time
+ # This emulates "parallel" by including it in the matrix
+ CI_SPLIT: [ "1/6", "2/6", "3/6", "4/6", "5/6", "6/6"]
+
+test_flaky:
+ extends: .test_job_with_test_agent
+ variables:
+ GRADLE_PARAMS: "-PrunFlakyTests"
+ CACHE_TYPE: "base"
+ testJvm: "8"
+ CONTINUE_ON_FAILURE: "true"
+ rules:
+ - *master_only
+ - if: $RUN_FLAKY_TESTS == "true"
+ when: on_success
+ parallel:
+ matrix:
+ - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"]
+ # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time
+ # This emulates "parallel" by including it in the matrix
+ CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ]
+
+test_flaky_inst:
+ extends: .test_job
+ variables:
+ GRADLE_TARGET: ":instrumentationTest"
+ GRADLE_PARAMS: "-PrunFlakyTests"
+ CACHE_TYPE: "inst"
+ testJvm: "8"
+ CONTINUE_ON_FAILURE: "true"
+ rules:
+ - *master_only
+ - if: $RUN_FLAKY_TESTS == "true"
+ when: on_success
+ parallel: 6
+
+test_profiling:
+ extends: .test_job
+ variables:
+ GRADLE_TARGET: ":profilingTest"
+ CACHE_TYPE: "profiling"
+ parallel:
+ matrix: *test_matrix
+
+# The debugger project uses its own JVM list because J9-based JVMs have issues with local variables,
+# so we need to test against at least one J9-based JVM
+test_debugger:
+ extends: .test_job
+ variables:
+ GRADLE_TARGET: ":debuggerTest"
+ CACHE_TYPE: "base"
+ DEFAULT_TEST_JVMS: /^(8|11|17|21|semeru8)$/
+ parallel:
+ matrix: *test_matrix
+
+test_smoke:
+ extends: .test_job
+ variables:
+ GRADLE_TARGET: "stageMainDist :smokeTest"
+ GRADLE_PARAMS: "-PskipFlakyTests"
+ CACHE_TYPE: "smoke"
+ parallel:
+ matrix: *test_matrix_2
+
+test_ssi_smoke:
+ extends: .test_job
+ rules: *master_only
+ variables:
+ GRADLE_TARGET: "stageMainDist :smokeTest"
+ CACHE_TYPE: "smoke"
+ DD_INJECT_FORCE: "true"
+ DD_INJECTION_ENABLED: "tracer"
+ parallel:
+ matrix: *test_matrix_2
+
+test_smoke_graalvm:
+ extends: .test_job
+ tags: [ "arch:amd64" ]
+ variables:
+ GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test"
+ CACHE_TYPE: "smoke"
+ CI_NO_SPLIT: "true"
+ NON_DEFAULT_JVMS: "true"
+ parallel:
+ matrix:
+ - testJvm: ["graalvm17", "graalvm21"]
+
+test_smoke_semeru8_debugger:
+ extends: .test_job
+ tags: [ "arch:amd64" ]
+ variables:
+ GRADLE_TARGET: "stageMainDist dd-smoke-tests:debugger-integration-tests:test"
+ CACHE_TYPE: "smoke"
+ NON_DEFAULT_JVMS: "true"
+ testJvm: "semeru8"
+
required:
extends: .fan_in
needs:
@@ -394,7 +687,7 @@ deploy_to_sonatype:
stage: publish
needs: [ build ]
variables:
- BUILD_CACHE_TYPE: lib
+ CACHE_TYPE: lib
rules:
- if: '$POPULATE_CACHE'
when: never
diff --git a/.gitlab/check_test_agent_results.sh b/.gitlab/check_test_agent_results.sh
new file mode 100755
index 00000000000..cfbc8f098be
--- /dev/null
+++ b/.gitlab/check_test_agent_results.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+set +e # Disable exiting from testagent response failure
+SUMMARY_RESPONSE=$(curl -s -w "\n%{http_code}" -o summary_response.txt "http://${CI_AGENT_HOST}:8126/test/trace_check/summary")
+set -e
+SUMMARY_RESPONSE_CODE=$(echo "$SUMMARY_RESPONSE" | awk 'END {print $NF}')
+
+if [[ $SUMMARY_RESPONSE_CODE -eq 200 ]]; then
+ echo "APM Test Agent is running. (HTTP 200)"
+else
+ echo "APM Test Agent is not running and was not used for testing. No checks failed."
+ exit 0
+fi
+
+RESPONSE=$(curl -s -w "\n%{http_code}" -o response.txt "http://${CI_AGENT_HOST}:8126/test/trace_check/failures")
+RESPONSE_CODE=$(echo "$RESPONSE" | awk 'END {print $NF}')
+
+if [[ $RESPONSE_CODE -eq 200 ]]; then
+ echo "All APM Test Agent Check Traces returned successful! (HTTP 200)"
+ echo "APM Test Agent Check Traces Summary Results:"
+ cat summary_response.txt | jq '.'
+elif [[ $RESPONSE_CODE -eq 404 ]]; then
+ echo "Real APM Agent running in place of TestAgent, no checks to validate!"
+else
+ echo "APM Test Agent Check Traces failed with response code: $RESPONSE_CODE"
+ echo "Failures:"
+ cat response.txt
+ echo "APM Test Agent Check Traces Summary Results:"
+ cat summary_response.txt | jq '.'
+ exit 1
+fi
diff --git a/.gitlab/gitlab-utils.sh b/.gitlab/gitlab-utils.sh
new file mode 100755
index 00000000000..6a668fe2250
--- /dev/null
+++ b/.gitlab/gitlab-utils.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+# From https://docs.gitlab.com/ci/jobs/job_logs/#use-a-script-to-improve-display-of-collapsible-sections
+# function for starting the section
+function gitlab_section_start () {
+ local section_title="${1}"
+ local section_description="${2:-$section_title}"
+
+ echo -e "section_start:`date +%s`:${section_title}[collapsed=true]\r\e[0K${section_description}"
+}
+
+# Function for ending the section
+function gitlab_section_end () {
+ local section_title="${1}"
+
+ echo -e "section_end:`date +%s`:${section_title}\r\e[0K"
+}
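+
+# Example usage (as in .gitlab-ci.yml):
+#   gitlab_section_start "collect-reports" "Collecting reports"
+#   .circleci/collect_reports.sh
+#   gitlab_section_end "collect-reports"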
diff --git a/build.gradle b/build.gradle
index 8629065f394..6a7447502b6 100644
--- a/build.gradle
+++ b/build.gradle
@@ -22,7 +22,7 @@ plugins {
id 'pl.allegro.tech.build.axion-release' version '1.14.4'
id 'io.github.gradle-nexus.publish-plugin' version '1.3.0'
- id "com.github.johnrengelman.shadow" version "7.1.2" apply false
+ id "com.gradleup.shadow" version "8.3.6" apply false
id "me.champeau.jmh" version "0.7.0" apply false
id 'org.gradle.playframework' version '0.13' apply false
id 'info.solidsoft.pitest' version '1.9.11' apply false
diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts
index 536853fc847..5c95fd38756 100644
--- a/buildSrc/build.gradle.kts
+++ b/buildSrc/build.gradle.kts
@@ -30,7 +30,7 @@ dependencies {
implementation(gradleApi())
implementation(localGroovy())
- implementation("net.bytebuddy", "byte-buddy-gradle-plugin", "1.15.11")
+ implementation("net.bytebuddy", "byte-buddy-gradle-plugin", "1.17.5")
implementation("org.eclipse.aether", "aether-connector-basic", "1.1.0")
implementation("org.eclipse.aether", "aether-transport-http", "1.1.0")
diff --git a/buildSrc/call-site-instrumentation-plugin/build.gradle.kts b/buildSrc/call-site-instrumentation-plugin/build.gradle.kts
index 1148173b096..39c18c12258 100644
--- a/buildSrc/call-site-instrumentation-plugin/build.gradle.kts
+++ b/buildSrc/call-site-instrumentation-plugin/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
java
groovy
id("com.diffplug.spotless") version "6.13.0"
- id("com.github.johnrengelman.shadow") version "8.1.1"
+ id("com.gradleup.shadow") version "8.3.6"
}
java {
@@ -36,7 +36,7 @@ dependencies {
implementation("org.ow2.asm", "asm-tree", "9.8")
implementation("com.github.javaparser", "javaparser-symbol-solver-core", "3.24.4")
- testImplementation("net.bytebuddy", "byte-buddy", "1.15.11")
+ testImplementation("net.bytebuddy", "byte-buddy", "1.17.5")
testImplementation("org.spockframework", "spock-core", "2.0-groovy-3.0")
testImplementation("org.objenesis", "objenesis", "3.0.1")
testImplementation("org.codehaus.groovy", "groovy-all", "3.0.17")
diff --git a/buildSrc/src/main/groovy/MuzzlePlugin.groovy b/buildSrc/src/main/groovy/MuzzlePlugin.groovy
index 81a3bb28c2d..bd37653f056 100644
--- a/buildSrc/src/main/groovy/MuzzlePlugin.groovy
+++ b/buildSrc/src/main/groovy/MuzzlePlugin.groovy
@@ -55,11 +55,15 @@ class MuzzlePlugin implements Plugin<Project> {
static {
RemoteRepository central = new RemoteRepository.Builder("central", "default", "https://repo1.maven.org/maven2/").build()
- // Only needed for restlet
- RemoteRepository restlet = new RemoteRepository.Builder("restlet", "default", "https://maven.restlet.talend.com/").build()
- // Only needed for play-2.3
- RemoteRepository typesafe = new RemoteRepository.Builder("typesafe", "default", "https://repo.typesafe.com/typesafe/maven-releases/").build()
- MUZZLE_REPOS = Collections.unmodifiableList(Arrays.asList(central, restlet, typesafe))
+
+ String mavenProxyUrl = System.getenv("MAVEN_REPOSITORY_PROXY")
+
+ if (mavenProxyUrl == null) {
+ MUZZLE_REPOS = Collections.singletonList(central)
+ } else {
+ RemoteRepository proxy = new RemoteRepository.Builder("central-proxy", "default", mavenProxyUrl).build()
+ MUZZLE_REPOS = Collections.unmodifiableList(Arrays.asList(proxy, central))
+ }
}
static class TestedArtifact {
diff --git a/buildSrc/src/test/groovy/CallSiteInstrumentationPluginTest.groovy b/buildSrc/src/test/groovy/CallSiteInstrumentationPluginTest.groovy
index 56edb291f9d..6adb661f0f4 100644
--- a/buildSrc/src/test/groovy/CallSiteInstrumentationPluginTest.groovy
+++ b/buildSrc/src/test/groovy/CallSiteInstrumentationPluginTest.groovy
@@ -27,7 +27,7 @@ class CallSiteInstrumentationPluginTest extends Specification {
}
dependencies {
- implementation group: 'net.bytebuddy', name: 'byte-buddy', version: '1.15.11'
+ implementation group: 'net.bytebuddy', name: 'byte-buddy', version: '1.17.5'
implementation group: 'com.google.auto.service', name: 'auto-service-annotations', version: '1.0-rc7'
}
'''
diff --git a/buildSrc/src/test/groovy/InstrumentPluginTest.groovy b/buildSrc/src/test/groovy/InstrumentPluginTest.groovy
index eb79bf833b2..fb9c6e35f1e 100644
--- a/buildSrc/src/test/groovy/InstrumentPluginTest.groovy
+++ b/buildSrc/src/test/groovy/InstrumentPluginTest.groovy
@@ -23,7 +23,7 @@ class InstrumentPluginTest extends Specification {
}
dependencies {
- compileOnly group: 'net.bytebuddy', name: 'byte-buddy', version: '1.15.11' // just to build TestPlugin
+ compileOnly group: 'net.bytebuddy', name: 'byte-buddy', version: '1.17.5' // just to build TestPlugin
}
apply plugin: 'instrument'
diff --git a/components/context/src/main/java/datadog/context/InferredProxyContext.java b/components/context/src/main/java/datadog/context/InferredProxyContext.java
new file mode 100644
index 00000000000..51eecc4cc02
--- /dev/null
+++ b/components/context/src/main/java/datadog/context/InferredProxyContext.java
@@ -0,0 +1,50 @@
+package datadog.context;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class InferredProxyContext implements ImplicitContextKeyed {
+  public static final ContextKey<InferredProxyContext> CONTEXT_KEY =
+      ContextKey.named("inferred-proxy-key");
+  private final Map<String, String> inferredProxy;
+
+ public static InferredProxyContext fromContext(Context context) {
+ return context.get(CONTEXT_KEY);
+ }
+
+  public InferredProxyContext(Map<String, String> contextInfo) {
+ this.inferredProxy =
+ (contextInfo == null || contextInfo.isEmpty())
+ ? new HashMap<>()
+ : new HashMap<>(contextInfo);
+ }
+
+ public InferredProxyContext() {
+ this.inferredProxy = new HashMap<>();
+ }
+
+  public Map<String, String> getInferredProxyContext() {
+ return Collections.unmodifiableMap(inferredProxy);
+ }
+
+ public void putInferredProxyInfo(String key, String value) {
+ inferredProxy.put(key, value);
+ }
+
+ public void removeInferredProxyInfo(String key) {
+ inferredProxy.remove(key);
+ }
+
+ /**
+ * Creates a new context with this value under its chosen key.
+ *
+ * @param context the context to copy the original values from.
+ * @return the new context with the implicitly keyed value.
+ * @see Context#with(ImplicitContextKeyed)
+ */
+ @Override
+ public Context storeInto(Context context) {
+ return context.with(CONTEXT_KEY, this);
+ }
+}
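+// Usage sketch (illustrative, not part of this change): after extraction,
+//   InferredProxyContext ipc = InferredProxyContext.fromContext(ctx);
+//   Map<String, String> headers = ipc.getInferredProxyContext();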
diff --git a/components/context/src/main/java/datadog/context/propagation/InferredProxyPropagator.java b/components/context/src/main/java/datadog/context/propagation/InferredProxyPropagator.java
new file mode 100644
index 00000000000..69e5a0e896e
--- /dev/null
+++ b/components/context/src/main/java/datadog/context/propagation/InferredProxyPropagator.java
@@ -0,0 +1,74 @@
+package datadog.context.propagation;
+
+import datadog.context.Context;
+import datadog.context.InferredProxyContext;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.BiConsumer;
+
+public class InferredProxyPropagator implements Propagator {
+ public static final String INFERRED_PROXY_KEY = "x-dd-proxy";
+ /**
+   * METHOD STUB: InferredProxy is currently not meant to be injected into downstream services.
+   * Injects a context into a downstream service using the given carrier.
+   *
+   * @param context the context containing the values to be injected.
+   * @param carrier the instance that will receive the key/value pairs to propagate.
+   * @param setter the callback to set key/value pairs into the carrier.
+   */
+  @Override
+  public <C> void inject(Context context, C carrier, CarrierSetter<C> setter) {}
+
+ /**
+   * Extracts a context from an upstream service.
+   *
+   * @param context the base context to store the extracted values on top, use {@link
+   *     Context#root()} for a default base context.
+   * @param carrier the instance to fetch the propagated key/value pairs from.
+   * @param visitor the callback to walk over the carrier and extract its key/value pairs.
+   * @return A context with the extracted values on top of the given base context.
+   */
+  @Override
+  public <C> Context extract(Context context, C carrier, CarrierVisitor<C> visitor) {
+ if (context == null || carrier == null || visitor == null) {
+ return context;
+ }
+ InferredProxyContextExtractor extractor = new InferredProxyContextExtractor();
+ visitor.forEachKeyValue(carrier, extractor);
+
+ InferredProxyContext extractedContext = extractor.extractedContext;
+ if (extractedContext == null) {
+ return context;
+ }
+ return extractedContext.storeInto(context);
+ }
+
+  public static class InferredProxyContextExtractor implements BiConsumer<String, String> {
+ private InferredProxyContext extractedContext;
+
+ InferredProxyContextExtractor() {}
+
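+    // Parsing stub: returns an empty map for now; accept() below stores the raw header key/value instead.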
+    private Map<String, String> parseInferredProxyHeaders(String input) {
+      Map<String, String> parsedHeaders = new HashMap<>();
+ return parsedHeaders;
+ }
+
+ /**
+ * Performs this operation on the given arguments.
+ *
+ * @param key the first input argument from an http header
+ * @param value the second input argument from an http header
+ */
+ @Override
+ public void accept(String key, String value) {
+ if (key == null || key.isEmpty() || !key.startsWith(INFERRED_PROXY_KEY)) {
+ return;
+ }
+      Map<String, String> inferredProxyMap = parseInferredProxyHeaders(value);
+ if (extractedContext == null) {
+ extractedContext = new InferredProxyContext();
+ }
+ extractedContext.putInferredProxyInfo(key, value);
+ }
+ }
+}
diff --git a/components/context/src/test/java/datadog/context/InferredProxyHandlingTest.java b/components/context/src/test/java/datadog/context/InferredProxyHandlingTest.java
new file mode 100644
index 00000000000..53ddf5cb12a
--- /dev/null
+++ b/components/context/src/test/java/datadog/context/InferredProxyHandlingTest.java
@@ -0,0 +1,465 @@
+package datadog.context;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import datadog.context.propagation.CarrierVisitor;
+import datadog.context.propagation.InferredProxyPropagator;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.stream.Stream;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test; // For @Test on nested class methods
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+class InferredProxyHandlingTest {
+
+ // Define header key constants locally for the test
+ static final String PROXY_SYSTEM_KEY = "x-dd-proxy-system";
+ static final String PROXY_REQUEST_TIME_MS_KEY = "x-dd-proxy-request-time-ms";
+ static final String PROXY_PATH_KEY = "x-dd-proxy-path";
+ static final String PROXY_HTTP_METHOD_KEY = "x-dd-proxy-httpmethod";
+ static final String PROXY_DOMAIN_NAME_KEY = "x-dd-proxy-domain-name";
+
+ private InferredProxyPropagator propagator;
+
+ @BeforeEach
+ void setUp() {
+ propagator = new InferredProxyPropagator();
+ }
+
+ // Moved @MethodSource providers to the outer class and made them static
+  static Stream<Arguments> validHeadersProviderForPropagator() {
+    Map<String, String> allStandard = new HashMap<>();
+ allStandard.put(PROXY_SYSTEM_KEY, "aws-apigw"); // The only currently supported system
+ allStandard.put(PROXY_REQUEST_TIME_MS_KEY, "12345");
+ allStandard.put(PROXY_PATH_KEY, "/foo");
+ allStandard.put(PROXY_HTTP_METHOD_KEY, "GET");
+ allStandard.put(PROXY_DOMAIN_NAME_KEY, "api.example.com");
+
+ return Stream.of(
+ Arguments.of(
+ "all standard headers (aws-apigw)",
+ allStandard,
+ "aws-apigw",
+ "12345",
+ "/foo",
+ "GET",
+ "api.example.com",
+ null,
+ null));
+ }
+
+  static Stream<Arguments> invalidOrMissingHeadersProviderForPropagator() { // Renamed
+    Map<String, String> missingSystem = new HashMap<>();
+    missingSystem.put(PROXY_REQUEST_TIME_MS_KEY, "12345");
+    missingSystem.put(PROXY_PATH_KEY, "/foo");
+
+    Map<String, String> missingTime = new HashMap<>();
+ missingTime.put(PROXY_SYSTEM_KEY, "aws-apigw");
+ missingTime.put(PROXY_PATH_KEY, "/foo");
+
+ return Stream.of(
+ Arguments.of("PROXY_SYSTEM_KEY missing", missingSystem),
+ Arguments.of("PROXY_REQUEST_TIME_MS_KEY missing", missingTime));
+ }
+
+ // Simple Map visitor for tests (can remain static or non-static in outer class)
+  static class MapVisitor implements CarrierVisitor<Map<String, String>>
diff --git a/dd-smoke-tests/springboot-openliberty-23/application/pom.xml b/dd-smoke-tests/springboot-openliberty-23/application/pom.xml
index a6b66b8083a..5a8fb308d78 100644
--- a/dd-smoke-tests/springboot-openliberty-23/application/pom.xml
+++ b/dd-smoke-tests/springboot-openliberty-23/application/pom.xml
@@ -107,5 +107,25 @@
${target.dir}
+    <profile>
+      <id>maven-proxy-profile</id>
+      <activation>
+        <property>
+          <name>env.MAVEN_REPOSITORY_PROXY</name>
+        </property>
+      </activation>
+      <repositories>
+        <repository>
+          <id>maven-proxy-repo</id>
+          <url>${env.MAVEN_REPOSITORY_PROXY}</url>
+        </repository>
+      </repositories>
+      <pluginRepositories>
+        <pluginRepository>
+          <id>maven-plugin-proxy</id>
+          <url>${env.MAVEN_REPOSITORY_PROXY}</url>
+        </pluginRepository>
+      </pluginRepositories>
+    </profile>
diff --git a/dd-smoke-tests/springboot/build.gradle b/dd-smoke-tests/springboot/build.gradle
index 3e0161aa2cb..ef534ba722b 100644
--- a/dd-smoke-tests/springboot/build.gradle
+++ b/dd-smoke-tests/springboot/build.gradle
@@ -1,5 +1,5 @@
plugins {
- id "com.github.johnrengelman.shadow"
+ id "com.gradleup.shadow"
id 'java-test-fixtures'
}
diff --git a/dd-trace-api/src/main/java/datadog/trace/api/config/TracerConfig.java b/dd-trace-api/src/main/java/datadog/trace/api/config/TracerConfig.java
index d817c88666e..5bc49039407 100644
--- a/dd-trace-api/src/main/java/datadog/trace/api/config/TracerConfig.java
+++ b/dd-trace-api/src/main/java/datadog/trace/api/config/TracerConfig.java
@@ -99,6 +99,9 @@ public final class TracerConfig {
public static final String TRACE_BAGGAGE_MAX_ITEMS = "trace.baggage.max.items";
public static final String TRACE_BAGGAGE_MAX_BYTES = "trace.baggage.max.bytes";
+ public static final String TRACE_INFERRED_PROXY_SERVICES_ENABLED =
+ "trace.inferred.proxy.services.enabled";
+
public static final String ENABLE_TRACE_AGENT_V05 = "trace.agent.v0.5.enabled";
public static final String CLIENT_IP_ENABLED = "trace.client-ip.enabled";
diff --git a/dd-trace-core/src/main/java/datadog/trace/core/CoreTracer.java b/dd-trace-core/src/main/java/datadog/trace/core/CoreTracer.java
index ffbcde5e9df..99fca082ecd 100644
--- a/dd-trace-core/src/main/java/datadog/trace/core/CoreTracer.java
+++ b/dd-trace-core/src/main/java/datadog/trace/core/CoreTracer.java
@@ -6,6 +6,7 @@
import static datadog.trace.api.DDTags.PROFILING_CONTEXT_ENGINE;
import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.BAGGAGE_CONCERN;
import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.DSM_CONCERN;
+import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.INFERRED_PROXY_CONCERN;
import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.TRACING_CONCERN;
import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.XRAY_TRACING_CONCERN;
import static datadog.trace.common.metrics.MetricsAggregatorFactory.createMetricsAggregator;
@@ -21,6 +22,7 @@
import datadog.communication.ddagent.SharedCommunicationObjects;
import datadog.communication.monitor.Monitoring;
import datadog.communication.monitor.Recording;
+import datadog.context.propagation.InferredProxyPropagator;
import datadog.context.propagation.Propagators;
import datadog.trace.api.ClassloaderConfigurationOverrides;
import datadog.trace.api.Config;
@@ -731,6 +733,9 @@ private CoreTracer(
if (config.isBaggagePropagationEnabled()) {
Propagators.register(BAGGAGE_CONCERN, new BaggagePropagator(config));
}
+ if (config.isInferredProxyPropagationEnabled()) {
+ Propagators.register(INFERRED_PROXY_CONCERN, new InferredProxyPropagator());
+ }
this.tagInterceptor =
null == tagInterceptor ? new TagInterceptor(new RuleFlags(config)) : tagInterceptor;
diff --git a/dd-trace-core/src/main/java/datadog/trace/core/baggage/BaggagePropagator.java b/dd-trace-core/src/main/java/datadog/trace/core/baggage/BaggagePropagator.java
index b7d32e7b9ea..808264222ca 100644
--- a/dd-trace-core/src/main/java/datadog/trace/core/baggage/BaggagePropagator.java
+++ b/dd-trace-core/src/main/java/datadog/trace/core/baggage/BaggagePropagator.java
@@ -24,42 +24,41 @@ public class BaggagePropagator implements Propagator {
private static final Logger LOG = LoggerFactory.getLogger(BaggagePropagator.class);
private static final PercentEscaper UTF_ESCAPER = PercentEscaper.create();
static final String BAGGAGE_KEY = "baggage";
- private final Config config;
private final boolean injectBaggage;
private final boolean extractBaggage;
+ private final int maxItems;
+ private final int maxBytes;
public BaggagePropagator(Config config) {
- this.injectBaggage = config.isBaggageInject();
- this.extractBaggage = config.isBaggageExtract();
- this.config = config;
+ this(
+ config.isBaggageInject(),
+        config.isBaggageExtract(),
+ config.getTraceBaggageMaxItems(),
+ config.getTraceBaggageMaxBytes());
}
// use primarily for testing purposes
- public BaggagePropagator(boolean injectBaggage, boolean extractBaggage) {
+ BaggagePropagator(boolean injectBaggage, boolean extractBaggage, int maxItems, int maxBytes) {
this.injectBaggage = injectBaggage;
this.extractBaggage = extractBaggage;
- this.config = Config.get();
+ this.maxItems = maxItems;
+ this.maxBytes = maxBytes;
}
@Override
public <C> void inject(Context context, C carrier, CarrierSetter<C> setter) {
- int maxItems = this.config.getTraceBaggageMaxItems();
- int maxBytes = this.config.getTraceBaggageMaxBytes();
- //noinspection ConstantValue
+ Baggage baggage;
if (!this.injectBaggage
- || maxItems == 0
- || maxBytes == 0
+ || this.maxItems == 0
+ || this.maxBytes == 0
|| context == null
|| carrier == null
- || setter == null) {
- return;
- }
-
- Baggage baggage = Baggage.fromContext(context);
- if (baggage == null) {
+ || setter == null
+ || (baggage = Baggage.fromContext(context)) == null) {
return;
}
+ // Inject cached header if any as optimized path
String headerValue = baggage.getW3cHeader();
if (headerValue != null) {
setter.set(carrier, BAGGAGE_KEY, headerValue);
@@ -86,11 +85,11 @@ public <C> void inject(Context context, C carrier, CarrierSetter<C> setter) {
processedItems++;
// reached the max number of baggage items allowed
- if (processedItems == maxItems) {
+ if (processedItems == this.maxItems) {
break;
}
// Drop newest k/v pair if adding it leads to exceeding the limit
- if (currentBytes + escapedKey.size + escapedVal.size + extraBytes > maxBytes) {
+ if (currentBytes + escapedKey.size + escapedVal.size + extraBytes > this.maxBytes) {
baggageText.setLength(currentBytes);
break;
}
@@ -98,13 +97,13 @@ public void inject(Context context, C carrier, CarrierSetter setter) {
}
headerValue = baggageText.toString();
+ // Save header as cache to re-inject it later if baggage did not change
baggage.setW3cHeader(headerValue);
setter.set(carrier, BAGGAGE_KEY, headerValue);
}
@Override
public <C> Context extract(Context context, C carrier, CarrierVisitor<C> visitor) {
- //noinspection ConstantValue
if (!this.extractBaggage || context == null || carrier == null || visitor == null) {
return context;
}
@@ -113,12 +112,11 @@ public <C> Context extract(Context context, C carrier, CarrierVisitor<C> visitor
return baggageExtractor.extracted == null ? context : context.with(baggageExtractor.extracted);
}
-  private static class BaggageExtractor implements BiConsumer<String, String> {
+  private class BaggageExtractor implements BiConsumer<String, String> {
private static final char KEY_VALUE_SEPARATOR = '=';
private static final char PAIR_SEPARATOR = ',';
private Baggage extracted;
-
- private BaggageExtractor() {}
+ private String w3cHeader;
/** URL decode value */
private String decode(final String value) {
@@ -134,6 +132,7 @@ private String decode(final String value) {
private Map<String, String> parseBaggageHeaders(String input) {
Map<String, String> baggage = new HashMap<>();
int start = 0;
+ boolean truncatedCache = false;
int pairSeparatorInd = input.indexOf(PAIR_SEPARATOR);
pairSeparatorInd = pairSeparatorInd == -1 ? input.length() : pairSeparatorInd;
int kvSeparatorInd = input.indexOf(KEY_VALUE_SEPARATOR);
@@ -152,11 +151,29 @@ private Map<String, String> parseBaggageHeaders(String input) {
}
baggage.put(key, value);
+ // need to percent-encode non-ascii headers we pass down
+ if (UTF_ESCAPER.keyNeedsEncoding(key) || UTF_ESCAPER.valNeedsEncoding(value)) {
+ truncatedCache = true;
+ this.w3cHeader = null;
+ } else if (!truncatedCache && (end > maxBytes || baggage.size() > maxItems)) {
+ if (start == 0) { // if we go out of range after first k/v pair, there is no cache
+ this.w3cHeader = null;
+ } else {
+ this.w3cHeader = input.substring(0, start - 1); // -1 to ignore the k/v separator
+ }
+ truncatedCache = true;
+ }
+
kvSeparatorInd = input.indexOf(KEY_VALUE_SEPARATOR, pairSeparatorInd + 1);
pairSeparatorInd = input.indexOf(PAIR_SEPARATOR, pairSeparatorInd + 1);
pairSeparatorInd = pairSeparatorInd == -1 ? input.length() : pairSeparatorInd;
start = end + 1;
}
+
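+    // Nothing was truncated or re-encoded: the raw header is safe to cache for re-injection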
+ if (!truncatedCache) {
+ this.w3cHeader = input;
+ }
+
return baggage;
}
@@ -166,7 +183,7 @@ public void accept(String key, String value) {
if (BAGGAGE_KEY.equalsIgnoreCase(key)) {
Map<String, String> baggage = parseBaggageHeaders(value);
if (!baggage.isEmpty()) {
- this.extracted = Baggage.create(baggage, value);
+ this.extracted = Baggage.create(baggage, this.w3cHeader);
}
}
}
diff --git a/dd-trace-core/src/main/java/datadog/trace/core/util/PercentEscaper.java b/dd-trace-core/src/main/java/datadog/trace/core/util/PercentEscaper.java
index c32036713ad..5fb3665ae8e 100644
--- a/dd-trace-core/src/main/java/datadog/trace/core/util/PercentEscaper.java
+++ b/dd-trace-core/src/main/java/datadog/trace/core/util/PercentEscaper.java
@@ -115,12 +115,38 @@ public Escaped escapeValue(String s) {
return escape(s, unsafeValOctets);
}
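+  /** True if {@code c} is outside printable ASCII or flagged in the given unsafe-octet table. */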
+ private boolean needsEncoding(char c, boolean[] unsafeOctets) {
+ if (c > '~' || c <= ' ' || c < unsafeOctets.length && unsafeOctets[c]) {
+ return true;
+ }
+ return false;
+ }
+
+ private boolean needsEncoding(String key, boolean[] unsafeOctets) {
+ int slen = key.length();
+ for (int index = 0; index < slen; index++) {
+ char c = key.charAt(index);
+ if (needsEncoding(c, unsafeOctets)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public boolean keyNeedsEncoding(String key) {
+ return needsEncoding(key, unsafeKeyOctets);
+ }
+
+ public boolean valNeedsEncoding(String val) {
+ return needsEncoding(val, unsafeValOctets);
+ }
+
/** Escape the provided String, using percent-style URL Encoding. */
public Escaped escape(String s, boolean[] unsafeOctets) {
int slen = s.length();
for (int index = 0; index < slen; index++) {
char c = s.charAt(index);
- if (c > '~' || c <= ' ' || c <= unsafeOctets.length && unsafeOctets[c]) {
+ if (needsEncoding(c, unsafeOctets)) {
return escapeSlow(s, index, unsafeOctets);
}
}
diff --git a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy
index 8fa819362d9..498a4b4a0af 100644
--- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy
+++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy
@@ -23,7 +23,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
static final int HTTP_OK = 200
@Shared
- long reportingInterval = 10
+ long reportingInterval = 1
@Shared
int queueSize = 256
@@ -106,9 +106,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
CountDownLatch latch = new CountDownLatch(1)
aggregator.publish([new SimpleSpan("service", "operation", "resource", "type", false, true, false, 0, 100, HTTP_OK)])
aggregator.report()
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then:
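+      // fails the test if the latch timed out instead of counting down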
+ latchTriggered
1 * writer.startBucket(1, _, _)
1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
value.getHitCount() == 1 && value.getTopLevelCount() == 1 && value.getDuration() == 100
@@ -135,9 +136,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
new SimpleSpan("service", "operation", "resource", "type", measured, topLevel, false, 0, 100, HTTP_OK)
])
aggregator.report()
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then:
+ latchTriggered
1 * writer.startBucket(1, _, _)
1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
value.getHitCount() == 1 && value.getTopLevelCount() == topLevelCount && value.getDuration() == 100
@@ -177,9 +179,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
aggregator.publish(trace)
}
aggregator.report()
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then: "metrics should be conflated"
+ latchTriggered
1 * writer.finishBucket() >> { latch.countDown() }
1 * writer.startBucket(2, _, SECONDS.toNanos(reportingInterval))
1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
@@ -216,9 +219,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
])
}
aggregator.report()
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then: "the first aggregate should be dropped but the rest reported"
+ latchTriggered
1 * writer.startBucket(10, _, SECONDS.toNanos(reportingInterval))
for (int i = 1; i < 11; ++i) {
1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
@@ -252,9 +256,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
])
}
aggregator.report()
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then: "all aggregates should be reported"
+ latchTriggered
1 * writer.startBucket(5, _, SECONDS.toNanos(reportingInterval))
for (int i = 0; i < 5; ++i) {
1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
@@ -271,9 +276,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
])
}
aggregator.report()
- latch.await(2, SECONDS)
+ latchTriggered = latch.await(2, SECONDS)
then: "aggregate not updated in cycle is not reported"
+ latchTriggered
1 * writer.startBucket(4, _, SECONDS.toNanos(reportingInterval))
for (int i = 1; i < 5; ++i) {
1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
@@ -307,16 +313,17 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
])
}
aggregator.report()
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then: "all aggregates should be reported"
+ latchTriggered
1 * writer.startBucket(5, _, SECONDS.toNanos(reportingInterval))
for (int i = 0; i < 5; ++i) {
1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
value.getHitCount() == 1 && value.getDuration() == duration
}
}
- 1 * writer.finishBucket()
+ 1 * writer.finishBucket() >> { latch.countDown() }
when:
reportAndWaitUntilEmpty(aggregator)
@@ -349,9 +356,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
new SimpleSpan("service" + i, "operation", "resource", "type", false, true, false, 0, duration, HTTP_OK)
])
}
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then: "all aggregates should be reported"
+ latchTriggered
1 * writer.startBucket(5, _, SECONDS.toNanos(1))
for (int i = 0; i < 5; ++i) {
1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value ->
@@ -421,9 +429,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
new SimpleSpan("service" + i, "operation", "resource", "type", false, true, false, 0, duration, HTTP_OK)
])
}
- latch.await(2, SECONDS)
+ def latchTriggered = latch.await(2, SECONDS)
then: "writer should be reset if reporting fails"
+ latchTriggered
1 * writer.startBucket(_, _, _) >> {
throw new IllegalArgumentException("something went wrong")
}
@@ -449,6 +458,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
then:
notThrown(TimeoutException)
!flushed
+
+ cleanup:
+ aggregator.close()
}
def "force flush should wait for aggregator to start"() {
@@ -480,6 +492,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
then:
notThrown(TimeoutException)
flushed
+
+ cleanup:
+ aggregator.close()
}
def reportAndWaitUntilEmpty(ConflatingMetricsAggregator aggregator) {
diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/baggage/BaggagePropagatorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/baggage/BaggagePropagatorTest.groovy
index 4910898c047..288eec8d15a 100644
--- a/dd-trace-core/src/test/groovy/datadog/trace/core/baggage/BaggagePropagatorTest.groovy
+++ b/dd-trace-core/src/test/groovy/datadog/trace/core/baggage/BaggagePropagatorTest.groovy
@@ -9,6 +9,8 @@ import datadog.trace.test.util.DDSpecification
import java.util.function.BiConsumer
+import static datadog.trace.api.ConfigDefaults.DEFAULT_TRACE_BAGGAGE_MAX_BYTES
+import static datadog.trace.api.ConfigDefaults.DEFAULT_TRACE_BAGGAGE_MAX_ITEMS
import static datadog.trace.core.baggage.BaggagePropagator.BAGGAGE_KEY
class BaggagePropagatorTest extends DDSpecification {
@@ -33,7 +35,7 @@ class BaggagePropagatorTest extends DDSpecification {
}
def setup() {
- this.propagator = new BaggagePropagator(true, true)
+ this.propagator = new BaggagePropagator(true, true, DEFAULT_TRACE_BAGGAGE_MAX_ITEMS, DEFAULT_TRACE_BAGGAGE_MAX_BYTES)
this.setter = new MapCarrierAccessor()
this.carrier = [:]
this.context = Context.root()
@@ -61,10 +63,9 @@ class BaggagePropagatorTest extends DDSpecification {
["abcdefg": "hijklmnopq♥"] | "abcdefg=hijklmnopq%E2%99%A5"
}
- def "test baggage item limit"() {
+ def "test baggage inject item limit"() {
setup:
- injectSysConfig("trace.baggage.max.items", '2')
- propagator = new BaggagePropagator(true, true) //creating a new instance after injecting config
+ propagator = new BaggagePropagator(true, true, 2, DEFAULT_TRACE_BAGGAGE_MAX_BYTES) // new instance with a custom max-items limit
context = Baggage.create(baggage).storeInto(context)
when:
@@ -79,10 +80,9 @@ class BaggagePropagatorTest extends DDSpecification {
[key1: "val1", key2: "val2", key3: "val3"] | "key1=val1,key2=val2"
}
- def "test baggage bytes limit"() {
+ def "test baggage inject bytes limit"() {
setup:
- injectSysConfig("trace.baggage.max.bytes", '20')
- propagator = new BaggagePropagator(true, true) //creating a new instance after injecting config
+ propagator = new BaggagePropagator(true, true, DEFAULT_TRACE_BAGGAGE_MAX_ITEMS, 20) // new instance with a custom max-bytes limit
context = Baggage.create(baggage).storeInto(context)
when:
@@ -116,6 +116,30 @@ class BaggagePropagatorTest extends DDSpecification {
"%22%2C%3B%5C%28%29%2F%3A%3C%3D%3E%3F%40%5B%5D%7B%7D=%22%2C%3B%5C" | ['",;\\()/:<=>?@[]{}': '",;\\']
}
+ def "test extracting non ASCII headers"() {
+ setup:
+ def headers = [
+ (BAGGAGE_KEY) : "key1=vallée,clé2=value",
+ ]
+
+ when:
+ context = this.propagator.extract(context, headers, ContextVisitors.stringValuesMap())
+ def baggage = Baggage.fromContext(context)
+
+ then: 'non-ASCII values are still accessible as part of the API'
+ baggage != null
+ baggage.asMap().get('key1') == 'vallée'
+ baggage.asMap().get('clé2') == 'value'
+ baggage.w3cHeader == null
+
+ when:
+ this.propagator.inject(Context.root().with(baggage), carrier, setter)
+
+ then: 'baggage values are URL-encoded if not valid, even if not modified'
+ assert carrier[BAGGAGE_KEY] == 'key1=vall%C3%A9e,cl%C3%A92=value'
+ }
+
def "extract invalid baggage headers"() {
setup:
def headers = [
@@ -139,8 +163,28 @@ class BaggagePropagatorTest extends DDSpecification {
"=" | _
}
- def "testing baggage cache"(){
+ def "test baggage cache"(){
+ setup:
+ def headers = [
+ (BAGGAGE_KEY) : baggageHeader,
+ ]
+
+ when:
+ context = this.propagator.extract(context, headers, ContextVisitors.stringValuesMap())
+
+ then:
+ Baggage baggageContext = Baggage.fromContext(context)
+ baggageContext.w3cHeader == cachedString
+
+ where:
+ baggageHeader | cachedString
+ "key1=val1,key2=val2,foo=bar" | "key1=val1,key2=val2,foo=bar"
+ '";\\()/:<=>?@[]{}=";\\' | null
+ }
+
+ def "test baggage cache items limit"(){
setup:
+ propagator = new BaggagePropagator(true, true, 2, DEFAULT_TRACE_BAGGAGE_MAX_BYTES) // new instance with a custom max-items limit
def headers = [
(BAGGAGE_KEY) : baggageHeader,
]
@@ -150,17 +194,32 @@ class BaggagePropagatorTest extends DDSpecification {
then:
Baggage baggageContext = Baggage.fromContext(context)
- baggageContext.asMap() == baggageMap
+ baggageContext.getW3cHeader() as String == cachedString
+
+ where:
+ baggageHeader | cachedString
+ "key1=val1,key2=val2" | "key1=val1,key2=val2"
+ "key1=val1,key2=val2,key3=val3" | "key1=val1,key2=val2"
+ "key1=val1,key2=val2,key3=val3,key4=val4" | "key1=val1,key2=val2"
+ }
+
+ def "test baggage cache bytes limit"(){
+ setup:
+ propagator = new BaggagePropagator(true, true, DEFAULT_TRACE_BAGGAGE_MAX_ITEMS, 20) // new instance with a custom max-bytes limit
+ def headers = [
+ (BAGGAGE_KEY) : baggageHeader,
+ ]
when:
- this.propagator.inject(context, carrier, setter)
+ context = this.propagator.extract(context, headers, ContextVisitors.stringValuesMap())
then:
- assert carrier[BAGGAGE_KEY] == baggageHeader
+ Baggage baggageContext = Baggage.fromContext(context)
+ baggageContext.getW3cHeader() as String == cachedString
where:
- baggageHeader | baggageMap
- "key1=val1,key2=val2,foo=bar" | ["key1": "val1", "key2": "val2", "foo": "bar"]
- "%22%2C%3B%5C%28%29%2F%3A%3C%3D%3E%3F%40%5B%5D%7B%7D=%22%2C%3B%5C" | ['",;\\()/:<=>?@[]{}': '",;\\']
+ baggageHeader | cachedString
+ "key1=val1,key2=val2" | "key1=val1,key2=val2"
+ "key1=val1,key2=val2,key3=val3" | "key1=val1,key2=val2"
}
}
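The new cache tests pin down when the extracted baggage may keep the raw W3C header for re-injection: only while each pair survives re-encoding byte-for-byte and the item/byte limits are not exceeded. At the limits a truncated prefix is cached, and nothing is cached when re-encoding would change the header (the non-ASCII case above). A rough sketch of that decision, assuming a hypothetical cacheableHeader helper and using URLEncoder as an approximation of the baggage octet rules; this is not the actual BaggagePropagator code:

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class BaggageHeaderCacheSketch {
  // Returns the header prefix that may be cached for re-injection, or null
  // when the raw header cannot be reused verbatim (illustrative only).
  static String cacheableHeader(String rawHeader, int maxItems, int maxBytes) {
    StringBuilder cached = new StringBuilder();
    int items = 0;
    for (String pair : rawHeader.split(",")) {
      if (items == maxItems) {
        break; // item limit reached: cache the truncated prefix
      }
      if (!pair.equals(reencode(pair))) {
        return null; // pair needs re-encoding, so the raw header is unusable
      }
      // Pairs that reach this point are ASCII, so chars == bytes here.
      if (cached.length() + pair.length() + (items > 0 ? 1 : 0) > maxBytes) {
        break; // byte limit reached: cache the truncated prefix
      }
      if (items > 0) {
        cached.append(',');
      }
      cached.append(pair);
      items++;
    }
    return cached.length() == 0 ? null : cached.toString();
  }

  private static String reencode(String pair) {
    String[] kv = pair.split("=", 2);
    if (kv.length != 2 || kv[0].isEmpty()) {
      return null; // malformed pair: never equal to the original
    }
    // URLEncoder only approximates W3C baggage encoding (it turns spaces
    // into '+', for instance), which is close enough for this sketch.
    return URLEncoder.encode(kv[0], StandardCharsets.UTF_8)
        + "=" + URLEncoder.encode(kv[1], StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    // Mirrors the where: tables above.
    System.out.println(cacheableHeader("key1=val1,key2=val2,key3=val3", 2, 256)); // key1=val1,key2=val2
    System.out.println(cacheableHeader("key1=val1,key2=val2,key3=val3", 64, 20)); // key1=val1,key2=val2
    System.out.println(cacheableHeader("key1=vallée,clé2=value", 64, 256));       // null
  }
}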
diff --git a/dd-trace-ot/build.gradle b/dd-trace-ot/build.gradle
index 77212c99889..fa5e07f10dc 100644
--- a/dd-trace-ot/build.gradle
+++ b/dd-trace-ot/build.gradle
@@ -1,5 +1,5 @@
plugins {
- id "com.github.johnrengelman.shadow"
+ id "com.gradleup.shadow"
id "me.champeau.jmh"
}
diff --git a/gradle/configure_tests.gradle b/gradle/configure_tests.gradle
index 60280fa3144..945a9e9665b 100644
--- a/gradle/configure_tests.gradle
+++ b/gradle/configure_tests.gradle
@@ -20,7 +20,7 @@ def isTestingInstrumentation(Project project) {
}
def forkedTestLimit = gradle.sharedServices.registerIfAbsent("forkedTestLimit", BuildService) {
- maxParallelUsages = 2
+ maxParallelUsages = 3
}
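Raising maxParallelUsages from 2 to 3 loosens the shared throttle on forked test tasks: a Gradle BuildService with maxParallelUsages behaves like a build-wide semaphore that every task declaring usesService() must acquire. Gradle implements this internally; the java.util.concurrent.Semaphore below is only an analogy for what the setting buys, not Gradle code:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;

public class ForkedTestLimitSketch {
  public static void main(String[] args) {
    // maxParallelUsages = 3: at most three forked test tasks run at once.
    Semaphore forkedTestLimit = new Semaphore(3);
    ExecutorService pool = Executors.newFixedThreadPool(8);
    for (int i = 0; i < 8; i++) {
      int task = i;
      pool.submit(() -> {
        try {
          forkedTestLimit.acquire(); // analogous to usesService(forkedTestLimit)
          System.out.println("forkedTest " + task + " running");
          Thread.sleep(100); // the actual test work
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        } finally {
          forkedTestLimit.release();
        }
      });
    }
    pool.shutdown();
  }
}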
// Force timeout after 9 minutes (CircleCI defaults will fail after 10 minutes without output)
@@ -69,7 +69,6 @@ tasks.withType(Test).configureEach {
if (name.startsWith("forkedTest") || name.endsWith("ForkedTest")) {
setExcludes([])
setIncludes(["**/*ForkedTest*"])
- jvmArgs += ["-Xms256M", "-Xmx256M"]
forkEvery 1
// Limit the number of concurrent forked tests
usesService(forkedTestLimit)
diff --git a/gradle/java_no_deps.gradle b/gradle/java_no_deps.gradle
index 95a87f0e8ed..bd4f3ed0aac 100644
--- a/gradle/java_no_deps.gradle
+++ b/gradle/java_no_deps.gradle
@@ -242,7 +242,7 @@ project.afterEvaluate {
}
}
-if (project.plugins.hasPlugin('com.github.johnrengelman.shadow')) {
+if (project.plugins.hasPlugin('com.gradleup.shadow')) {
// Remove the no-deps jar from the archives to prevent publication
configurations.archives.with {
artifacts.remove artifacts.find {
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 2bcfac6d39b..135aea11b0e 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -10,7 +10,7 @@ spock = "2.3-groovy-3.0"
groovy = "3.0.17"
junit5 = "5.9.2"
logback = "1.2.3"
-bytebuddy = "1.15.11"
+bytebuddy = "1.17.5"
scala = "2.11.12" # Last version to support Java 7 (2.12+ require Java 8+)
scala211 = "2.11.12"
scala212 = "2.12.18"
diff --git a/gradle/publish.gradle b/gradle/publish.gradle
index 8fb8b6c55f6..3e3f9456aa9 100644
--- a/gradle/publish.gradle
+++ b/gradle/publish.gradle
@@ -21,7 +21,7 @@ assert !forceLocal || forceLocal != isGitlabCI
publishing {
publications {
maven(MavenPublication) { MavenPublication publication ->
- if (project.plugins.hasPlugin('com.github.johnrengelman.shadow')) {
+ if (project.plugins.hasPlugin('com.gradleup.shadow')) {
publication.artifact(project.tasks.shadowJar)
// Required by Maven Central:
@@ -56,7 +56,7 @@ publishing {
}
}
-if (project.plugins.hasPlugin('com.github.johnrengelman.shadow')) {
+if (project.plugins.hasPlugin('com.gradleup.shadow')) {
// Disable gradle module metadata to avoid publishing contradictory info.
tasks.withType(GenerateModuleMetadata).configureEach {
enabled = false
diff --git a/internal-api/build.gradle b/internal-api/build.gradle
index dd5ee2ce0cc..d137456f9fe 100644
--- a/internal-api/build.gradle
+++ b/internal-api/build.gradle
@@ -139,6 +139,7 @@ excludedClassesCoverage += [
// POJO
"datadog.trace.api.git.GitInfo",
"datadog.trace.api.git.GitInfoProvider",
+ "datadog.trace.api.git.GitInfoProvider.ShaDiscrepancy",
// POJO
"datadog.trace.api.git.PersonInfo",
// POJO
diff --git a/internal-api/src/main/java/datadog/trace/api/Config.java b/internal-api/src/main/java/datadog/trace/api/Config.java
index d6526b29716..9277e7df974 100644
--- a/internal-api/src/main/java/datadog/trace/api/Config.java
+++ b/internal-api/src/main/java/datadog/trace/api/Config.java
@@ -195,6 +195,7 @@ public static String getHostName() {
private final boolean tracePropagationExtractFirst;
private final int traceBaggageMaxItems;
private final int traceBaggageMaxBytes;
+ private final boolean traceInferredProxyEnabled;
private final int clockSyncPeriod;
private final boolean logsInjectionEnabled;
@@ -1069,6 +1070,8 @@ private Config(final ConfigProvider configProvider, final InstrumenterConfig ins
tracePropagationExtractFirst =
configProvider.getBoolean(
TRACE_PROPAGATION_EXTRACT_FIRST, DEFAULT_TRACE_PROPAGATION_EXTRACT_FIRST);
+ traceInferredProxyEnabled =
+ configProvider.getBoolean(TRACE_INFERRED_PROXY_SERVICES_ENABLED, false);
clockSyncPeriod = configProvider.getInteger(CLOCK_SYNC_PERIOD, DEFAULT_CLOCK_SYNC_PERIOD);
@@ -2022,7 +2025,7 @@ PROFILING_DATADOG_PROFILER_ENABLED, isDatadogProfilerSafeInCurrentEnvironment())
this.apmTracingEnabled = configProvider.getBoolean(GeneralConfig.APM_TRACING_ENABLED, true);
- this.jdkSocketEnabled = configProvider.getBoolean(JDK_SOCKET_ENABLED, false);
+ this.jdkSocketEnabled = configProvider.getBoolean(JDK_SOCKET_ENABLED, true);
log.debug("New instance: {}", this);
}
@@ -2368,6 +2371,10 @@ public boolean isTracePropagationExtractFirst() {
return tracePropagationExtractFirst;
}
+ public boolean isInferredProxyPropagationEnabled() {
+ return traceInferredProxyEnabled;
+ }
+
public boolean isBaggageExtract() {
return tracePropagationStylesToExtract.contains(TracePropagationStyle.BAGGAGE);
}
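The new flag follows Config's read-once pattern: resolved from the provider at construction with a false default, then exposed through `isInferredProxyPropagationEnabled()`. Code wanting the new `INFERRED_PROXY_CONCERN` (added further below in AgentPropagation) would presumably gate on this getter; everything in this sketch except the getter name and the false default is hypothetical:

public class InferredProxyGateSketch {
  // Hypothetical stand-in: the real registry and propagator types are not
  // part of this change.
  interface PropagatorRegistry { void register(String concern, Object propagator); }

  static void wire(boolean inferredProxyEnabled, PropagatorRegistry registry) {
    // Mirrors configProvider.getBoolean(TRACE_INFERRED_PROXY_SERVICES_ENABLED, false):
    // the feature stays off unless explicitly enabled.
    if (inferredProxyEnabled) {
      registry.register("inferred-proxy", new Object() /* propagator */);
    }
  }

  public static void main(String[] args) {
    wire(false, (c, p) -> System.out.println("registered " + c)); // prints nothing
    wire(true, (c, p) -> System.out.println("registered " + c));  // registered inferred-proxy
  }
}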
diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/CiVisibilityCountMetric.java b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/CiVisibilityCountMetric.java
index 9bf734eeab6..2d8de535abe 100644
--- a/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/CiVisibilityCountMetric.java
+++ b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/CiVisibilityCountMetric.java
@@ -14,6 +14,10 @@
import datadog.trace.api.civisibility.telemetry.tag.ExitCode;
import datadog.trace.api.civisibility.telemetry.tag.FailFastTestOrderEnabled;
import datadog.trace.api.civisibility.telemetry.tag.FlakyTestRetriesEnabled;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
+import datadog.trace.api.civisibility.telemetry.tag.GitShaDiscrepancyType;
+import datadog.trace.api.civisibility.telemetry.tag.GitShaMatch;
import datadog.trace.api.civisibility.telemetry.tag.HasCodeowner;
import datadog.trace.api.civisibility.telemetry.tag.HasFailedAllRetries;
import datadog.trace.api.civisibility.telemetry.tag.ImpactedTestsDetectionEnabled;
@@ -101,6 +105,14 @@ public enum CiVisibilityCountMetric {
GIT_COMMAND("git.command", Command.class),
/** The number of git commands that errored */
GIT_COMMAND_ERRORS("git.command_errors", Command.class, ExitCode.class),
+ /** Number of commit sha comparisons and if they matched when building git info for a repo */
+ GIT_COMMIT_SHA_MATCH("git.commit_sha_match", GitShaMatch.class),
+ /** Number of sha mismatches when building git info for a repo */
+ GIT_COMMIT_SHA_DISCREPANCY(
+ "git.commit_sha_discrepancy",
+ GitProviderExpected.class,
+ GitProviderDiscrepant.class,
+ GitShaDiscrepancyType.class),
/** The number of requests sent to the search commit endpoint */
GIT_REQUESTS_SEARCH_COMMITS("git_requests.search_commits", RequestCompressed.class),
/** The number of search commit requests sent to the endpoint that errored */
diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitProviderDiscrepant.java b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitProviderDiscrepant.java
new file mode 100644
index 00000000000..e356805a255
--- /dev/null
+++ b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitProviderDiscrepant.java
@@ -0,0 +1,16 @@
+package datadog.trace.api.civisibility.telemetry.tag;
+
+import datadog.trace.api.civisibility.telemetry.TagValue;
+
+public enum GitProviderDiscrepant implements TagValue {
+ USER_SUPPLIED,
+ CI_PROVIDER,
+ LOCAL_GIT,
+ GIT_CLIENT,
+ EMBEDDED;
+
+ @Override
+ public String asString() {
+ return "discrepant_provider:" + name().toLowerCase();
+ }
+}
diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitProviderExpected.java b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitProviderExpected.java
new file mode 100644
index 00000000000..cf2c6e5b9f3
--- /dev/null
+++ b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitProviderExpected.java
@@ -0,0 +1,16 @@
+package datadog.trace.api.civisibility.telemetry.tag;
+
+import datadog.trace.api.civisibility.telemetry.TagValue;
+
+public enum GitProviderExpected implements TagValue {
+ USER_SUPPLIED,
+ CI_PROVIDER,
+ LOCAL_GIT,
+ GIT_CLIENT,
+ EMBEDDED;
+
+ @Override
+ public String asString() {
+ return "expected_provider:" + name().toLowerCase();
+ }
+}
diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitShaDiscrepancyType.java b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitShaDiscrepancyType.java
new file mode 100644
index 00000000000..2be69a41e08
--- /dev/null
+++ b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitShaDiscrepancyType.java
@@ -0,0 +1,13 @@
+package datadog.trace.api.civisibility.telemetry.tag;
+
+import datadog.trace.api.civisibility.telemetry.TagValue;
+
+public enum GitShaDiscrepancyType implements TagValue {
+ REPOSITORY_DISCREPANCY,
+ COMMIT_DISCREPANCY;
+
+ @Override
+ public String asString() {
+ return "type:" + name().toLowerCase();
+ }
+}
diff --git a/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitShaMatch.java b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitShaMatch.java
new file mode 100644
index 00000000000..f5ea9b9a7f8
--- /dev/null
+++ b/internal-api/src/main/java/datadog/trace/api/civisibility/telemetry/tag/GitShaMatch.java
@@ -0,0 +1,13 @@
+package datadog.trace.api.civisibility.telemetry.tag;
+
+import datadog.trace.api.civisibility.telemetry.TagValue;
+
+public enum GitShaMatch implements TagValue {
+ TRUE,
+ FALSE;
+
+ @Override
+ public String asString() {
+ return "matched:" + name().toLowerCase();
+ }
+}
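Each of these new tag enums encodes both the tag name and the value in `asString()`, so the telemetry layer can emit them without any extra mapping. A quick demonstration of the rendered strings, assuming only the four enums introduced above:

import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
import datadog.trace.api.civisibility.telemetry.tag.GitShaDiscrepancyType;
import datadog.trace.api.civisibility.telemetry.tag.GitShaMatch;

public class TagRenderingSketch {
  public static void main(String[] args) {
    System.out.println(GitShaMatch.FALSE.asString());                        // matched:false
    System.out.println(GitProviderExpected.CI_PROVIDER.asString());          // expected_provider:ci_provider
    System.out.println(GitProviderDiscrepant.LOCAL_GIT.asString());          // discrepant_provider:local_git
    System.out.println(GitShaDiscrepancyType.REPOSITORY_DISCREPANCY.asString()); // type:repository_discrepancy
  }
}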
diff --git a/internal-api/src/main/java/datadog/trace/api/git/EmbeddedGitInfoBuilder.java b/internal-api/src/main/java/datadog/trace/api/git/EmbeddedGitInfoBuilder.java
index c388f5471d0..5b0c71ce149 100644
--- a/internal-api/src/main/java/datadog/trace/api/git/EmbeddedGitInfoBuilder.java
+++ b/internal-api/src/main/java/datadog/trace/api/git/EmbeddedGitInfoBuilder.java
@@ -1,5 +1,7 @@
package datadog.trace.api.git;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
@@ -89,4 +91,14 @@ public GitInfo build(@Nullable String repositoryPath) {
public int order() {
return Integer.MAX_VALUE;
}
+
+ @Override
+ public GitProviderExpected providerAsExpected() {
+ return GitProviderExpected.EMBEDDED;
+ }
+
+ @Override
+ public GitProviderDiscrepant providerAsDiscrepant() {
+ return GitProviderDiscrepant.EMBEDDED;
+ }
}
diff --git a/internal-api/src/main/java/datadog/trace/api/git/GitInfoBuilder.java b/internal-api/src/main/java/datadog/trace/api/git/GitInfoBuilder.java
index a498407532b..7deb594d25b 100644
--- a/internal-api/src/main/java/datadog/trace/api/git/GitInfoBuilder.java
+++ b/internal-api/src/main/java/datadog/trace/api/git/GitInfoBuilder.java
@@ -1,9 +1,20 @@
package datadog.trace.api.git;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
import javax.annotation.Nullable;
public interface GitInfoBuilder {
GitInfo build(@Nullable String repositoryPath);
int order();
+
+ /**
+ * Used for SHA-discrepancy telemetry. Two enums are needed, one per tag
+ * (`expected_provider` and `discrepant_provider`), since a provider can act as either one
+ * depending on the discrepancy found.
+ */
+ GitProviderExpected providerAsExpected();
+
+ GitProviderDiscrepant providerAsDiscrepant();
}
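With the two new interface methods, every GitInfoBuilder names itself once per tag, and GitInfoProvider can attribute a discrepancy to the pair of sources involved without any string munging. A minimal hypothetical implementation, just to show the shape (the build() and order() bodies are placeholders; a real CI-provider builder would read CI environment variables):

import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
import datadog.trace.api.git.CommitInfo;
import datadog.trace.api.git.GitInfo;
import datadog.trace.api.git.GitInfoBuilder;
import datadog.trace.api.git.PersonInfo;
import javax.annotation.Nullable;

public class ExampleCiGitInfoBuilder implements GitInfoBuilder {
  @Override
  public GitInfo build(@Nullable String repositoryPath) {
    // Placeholder payload using the constructors seen in the tests below.
    return new GitInfo(null, null, null,
        new CommitInfo(null, PersonInfo.NOOP, PersonInfo.NOOP, null));
  }

  @Override
  public int order() {
    return 1; // lower order wins; UserSuppliedGitInfoBuilder uses 0
  }

  @Override
  public GitProviderExpected providerAsExpected() {
    return GitProviderExpected.CI_PROVIDER;
  }

  @Override
  public GitProviderDiscrepant providerAsDiscrepant() {
    return GitProviderDiscrepant.CI_PROVIDER;
  }
}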
diff --git a/internal-api/src/main/java/datadog/trace/api/git/GitInfoProvider.java b/internal-api/src/main/java/datadog/trace/api/git/GitInfoProvider.java
index 02f95eedbb5..9550a16236e 100644
--- a/internal-api/src/main/java/datadog/trace/api/git/GitInfoProvider.java
+++ b/internal-api/src/main/java/datadog/trace/api/git/GitInfoProvider.java
@@ -2,15 +2,24 @@
import datadog.trace.api.cache.DDCache;
import datadog.trace.api.cache.DDCaches;
+import datadog.trace.api.civisibility.InstrumentationBridge;
+import datadog.trace.api.civisibility.telemetry.CiVisibilityCountMetric;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
+import datadog.trace.api.civisibility.telemetry.tag.GitShaDiscrepancyType;
+import datadog.trace.api.civisibility.telemetry.tag.GitShaMatch;
import datadog.trace.util.Strings;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import javax.annotation.Nullable;
@@ -46,35 +55,48 @@ public GitInfo getGitInfo(@Nullable String repositoryPath) {
if (repositoryPath == null) {
repositoryPath = NULL_PATH_STRING;
}
+
return gitInfoCache.computeIfAbsent(repositoryPath, this::buildGitInfo);
}
private GitInfo buildGitInfo(String repositoryPath) {
Evaluator evaluator = new Evaluator(repositoryPath, builders);
- return new GitInfo(
- evaluator.get(
- gi -> GitUtils.filterSensitiveInfo(gi.getRepositoryURL()),
- GitInfoProvider::validateGitRemoteUrl),
- evaluator.get(GitInfo::getBranch, Strings::isNotBlank),
- evaluator.get(GitInfo::getTag, Strings::isNotBlank),
- new CommitInfo(
- evaluator.get(gi1 -> gi1.getCommit().getSha(), Strings::isNotBlank),
- new PersonInfo(
- evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getAuthor().getName(), Strings::isNotBlank),
- evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getAuthor().getEmail(), Strings::isNotBlank),
- evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getAuthor().getIso8601Date(), Strings::isNotBlank)),
- new PersonInfo(
- evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getCommitter().getName(), Strings::isNotBlank),
+ GitInfo gitInfo =
+ new GitInfo(
+ evaluator.get(
+ gi -> GitUtils.filterSensitiveInfo(gi.getRepositoryURL()),
+ GitInfoProvider::validateGitRemoteUrl),
+ evaluator.get(GitInfo::getBranch, Strings::isNotBlank),
+ evaluator.get(GitInfo::getTag, Strings::isNotBlank),
+ new CommitInfo(
+ evaluator.get(gi1 -> gi1.getCommit().getSha(), Strings::isNotBlank),
+ new PersonInfo(
+ evaluator.getIfCommitShaMatches(
+ gi -> gi.getCommit().getAuthor().getName(), Strings::isNotBlank),
+ evaluator.getIfCommitShaMatches(
+ gi -> gi.getCommit().getAuthor().getEmail(), Strings::isNotBlank),
+ evaluator.getIfCommitShaMatches(
+ gi -> gi.getCommit().getAuthor().getIso8601Date(), Strings::isNotBlank)),
+ new PersonInfo(
+ evaluator.getIfCommitShaMatches(
+ gi -> gi.getCommit().getCommitter().getName(), Strings::isNotBlank),
+ evaluator.getIfCommitShaMatches(
+ gi -> gi.getCommit().getCommitter().getEmail(), Strings::isNotBlank),
+ evaluator.getIfCommitShaMatches(
+ gi -> gi.getCommit().getCommitter().getIso8601Date(), Strings::isNotBlank)),
evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getCommitter().getEmail(), Strings::isNotBlank),
- evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getCommitter().getIso8601Date(), Strings::isNotBlank)),
- evaluator.getIfCommitShaMatches(
- gi -> gi.getCommit().getFullMessage(), Strings::isNotBlank)));
+ gi -> gi.getCommit().getFullMessage(), Strings::isNotBlank)));
+
+ InstrumentationBridge.getMetricCollector()
+ .add(
+ CiVisibilityCountMetric.GIT_COMMIT_SHA_MATCH,
+ 1,
+ evaluator.shaDiscrepancies.isEmpty() ? GitShaMatch.TRUE : GitShaMatch.FALSE);
+ for (ShaDiscrepancy mismatch : evaluator.shaDiscrepancies) {
+ mismatch.addTelemetry();
+ }
+
+ return gitInfo;
}
private static boolean validateGitRemoteUrl(String s) {
@@ -82,6 +104,46 @@ private static boolean validateGitRemoteUrl(String s) {
return Strings.isNotBlank(s) && !s.startsWith("file:");
}
+ private static final class ShaDiscrepancy {
+ private final GitProviderExpected expectedGitProvider;
+ private final GitProviderDiscrepant discrepantGitProvider;
+ private final GitShaDiscrepancyType discrepancyType;
+
+ private ShaDiscrepancy(
+ GitProviderExpected expectedGitProvider,
+ GitProviderDiscrepant discrepantGitProvider,
+ GitShaDiscrepancyType discrepancyType) {
+ this.expectedGitProvider = expectedGitProvider;
+ this.discrepantGitProvider = discrepantGitProvider;
+ this.discrepancyType = discrepancyType;
+ }
+
+ private void addTelemetry() {
+ InstrumentationBridge.getMetricCollector()
+ .add(
+ CiVisibilityCountMetric.GIT_COMMIT_SHA_DISCREPANCY,
+ 1,
+ expectedGitProvider,
+ discrepantGitProvider,
+ discrepancyType);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null || getClass() != obj.getClass()) return false;
+ ShaDiscrepancy that = (ShaDiscrepancy) obj;
+ return expectedGitProvider.equals(that.expectedGitProvider)
+ && discrepantGitProvider.equals(that.discrepantGitProvider)
+ && discrepancyType.equals(that.discrepancyType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(expectedGitProvider, discrepantGitProvider, discrepancyType);
+ }
+ }
+
/**
* Uses provided GitInfoBuilder instances to get GitInfo data.
*
@@ -95,10 +157,12 @@ private static boolean validateGitRemoteUrl(String s) {
private static final class Evaluator {
private final String repositoryPath;
private final Map<GitInfoBuilder, GitInfo> infos;
+ private final Set<ShaDiscrepancy> shaDiscrepancies;
private Evaluator(String repositoryPath, Collection<GitInfoBuilder> builders) {
this.repositoryPath = repositoryPath;
this.infos = new LinkedHashMap<>();
+ this.shaDiscrepancies = new HashSet<>();
for (GitInfoBuilder builder : builders) {
infos.put(builder, null);
}
@@ -121,7 +185,10 @@ private String get(
Function<GitInfo, String> function,
Predicate<String> validator,
boolean checkShaIntegrity) {
- String commitSha = null;
+ String expectedCommitSha = null;
+ String expectedRepoUrl = null;
+ GitProviderExpected expectedGitProvider = null;
+
for (Map.Entry<GitInfoBuilder, GitInfo> e : infos.entrySet()) {
GitInfo info = e.getValue();
if (info == null) {
@@ -134,11 +201,22 @@ private String get(
CommitInfo currentCommit = info.getCommit();
String currentCommitSha = currentCommit != null ? currentCommit.getSha() : null;
if (Strings.isNotBlank(currentCommitSha)) {
- if (commitSha == null) {
- commitSha = currentCommitSha;
- } else if (!commitSha.equals(currentCommitSha)) {
+ if (expectedCommitSha == null) {
+ expectedCommitSha = currentCommitSha;
+ expectedRepoUrl = info.getRepositoryURL();
+ expectedGitProvider = e.getKey().providerAsExpected();
+ } else if (!expectedCommitSha.equals(currentCommitSha)) {
// We already have a commit SHA from source that has higher priority.
// Commit SHA from current source is different, so we have to skip it
+ GitShaDiscrepancyType discrepancyType = GitShaDiscrepancyType.COMMIT_DISCREPANCY;
+ String repoUrl = info.getRepositoryURL();
+ if (expectedRepoUrl != null && repoUrl != null && !repoUrl.equals(expectedRepoUrl)) {
+ discrepancyType = GitShaDiscrepancyType.REPOSITORY_DISCREPANCY;
+ }
+
+ shaDiscrepancies.add(
+ new ShaDiscrepancy(
+ expectedGitProvider, e.getKey().providerAsDiscrepant(), discrepancyType));
continue;
}
}
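The evaluator now remembers which source established the expected SHA (along with its repository URL), so each later mismatch can be classified: the same repository URL with a different SHA is a COMMIT_DISCREPANCY, while differing URLs upgrade it to a REPOSITORY_DISCREPANCY; duplicates collapse in the HashSet because ShaDiscrepancy implements equals/hashCode. The classification rule, isolated into a standalone sketch (types are local stand-ins, not the real enums):

public class DiscrepancyClassifierSketch {
  enum DiscrepancyType { COMMIT_DISCREPANCY, REPOSITORY_DISCREPANCY }

  // Mirrors the rule in Evaluator.get(): a repo-URL mismatch upgrades the
  // discrepancy from commit-level to repository-level.
  static DiscrepancyType classify(
      String expectedRepoUrl, String expectedSha, String repoUrl, String sha) {
    if (expectedSha.equals(sha)) {
      throw new IllegalArgumentException("not a discrepancy: SHAs match");
    }
    if (expectedRepoUrl != null && repoUrl != null && !repoUrl.equals(expectedRepoUrl)) {
      return DiscrepancyType.REPOSITORY_DISCREPANCY;
    }
    return DiscrepancyType.COMMIT_DISCREPANCY;
  }

  public static void main(String[] args) {
    // Matches the test expectations further below: shaA vs shaB in the same
    // repo is a commit discrepancy, shaA vs shaC across repos a repository one.
    System.out.println(classify("repoUrlA", "shaA", "repoUrlA", "shaB"));
    System.out.println(classify("repoUrlA", "shaA", "repoUrlB", "shaC"));
  }
}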
diff --git a/internal-api/src/main/java/datadog/trace/api/git/UserSuppliedGitInfoBuilder.java b/internal-api/src/main/java/datadog/trace/api/git/UserSuppliedGitInfoBuilder.java
index 31751ff3bb6..215e439562b 100644
--- a/internal-api/src/main/java/datadog/trace/api/git/UserSuppliedGitInfoBuilder.java
+++ b/internal-api/src/main/java/datadog/trace/api/git/UserSuppliedGitInfoBuilder.java
@@ -1,6 +1,8 @@
package datadog.trace.api.git;
import datadog.trace.api.Config;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant;
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected;
import datadog.trace.api.config.GeneralConfig;
import datadog.trace.bootstrap.config.provider.ConfigProvider;
import datadog.trace.bootstrap.instrumentation.api.Tags;
@@ -106,4 +108,14 @@ public GitInfo build(@Nullable String repositoryPath) {
public int order() {
return 0;
}
+
+ @Override
+ public GitProviderExpected providerAsExpected() {
+ return GitProviderExpected.USER_SUPPLIED;
+ }
+
+ @Override
+ public GitProviderDiscrepant providerAsDiscrepant() {
+ return GitProviderDiscrepant.USER_SUPPLIED;
+ }
}
diff --git a/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/AgentPropagation.java b/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/AgentPropagation.java
index a25c0abfee5..444342c2c1f 100644
--- a/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/AgentPropagation.java
+++ b/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/AgentPropagation.java
@@ -14,7 +14,7 @@ public final class AgentPropagation {
public static final Concern TRACING_CONCERN = named("tracing");
public static final Concern BAGGAGE_CONCERN = named("baggage");
public static final Concern XRAY_TRACING_CONCERN = named("tracing-xray");
-
+ public static final Concern INFERRED_PROXY_CONCERN = named("inferred-proxy");
// TODO DSM propagator should run after the other propagators as it stores the pathway context
// TODO into the span context for now. Remove priority after the migration is complete.
public static final Concern DSM_CONCERN = withPriority("data-stream-monitoring", 110);
diff --git a/internal-api/src/test/groovy/datadog/trace/api/git/GitInfoProviderTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/git/GitInfoProviderTest.groovy
index 73219b9ba0d..c6177f24901 100644
--- a/internal-api/src/test/groovy/datadog/trace/api/git/GitInfoProviderTest.groovy
+++ b/internal-api/src/test/groovy/datadog/trace/api/git/GitInfoProviderTest.groovy
@@ -1,5 +1,12 @@
package datadog.trace.api.git
+import datadog.trace.api.civisibility.InstrumentationBridge
+import datadog.trace.api.civisibility.telemetry.CiVisibilityCountMetric
+import datadog.trace.api.civisibility.telemetry.CiVisibilityMetricCollector
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderDiscrepant
+import datadog.trace.api.civisibility.telemetry.tag.GitProviderExpected
+import datadog.trace.api.civisibility.telemetry.tag.GitShaDiscrepancyType
+import datadog.trace.api.civisibility.telemetry.tag.GitShaMatch
import spock.lang.Specification
class GitInfoProviderTest extends Specification {
@@ -238,6 +245,81 @@ class GitInfoProviderTest extends Specification {
actualGitInfo.commit.committer.iso8601Date == null
}
+ def "test adds correct telemetry metrics when SHA discrepancies are found"() {
+ setup:
+ def metricCollector = Mock(CiVisibilityMetricCollector)
+ InstrumentationBridge.registerMetricCollector(metricCollector)
+
+ def gitInfoA = new GitInfo("repoUrlA", null, null,
+ new CommitInfo("shaA",
+ PersonInfo.NOOP,
+ PersonInfo.NOOP,
+ "message"
+ ))
+ def gitInfoB = new GitInfo("repoUrlA", null, null,
+ new CommitInfo("shaB",
+ new PersonInfo("author name", "author email", "author date"),
+ new PersonInfo("committer name", "committer email", "committer date"),
+ "message"
+ ))
+ def gitInfoC = new GitInfo("repoUrlB", null, null,
+ new CommitInfo("shaC",
+ new PersonInfo("author name", "author email", "author date"),
+ new PersonInfo("committer name", "committer email", "committer date"),
+ "message"
+ ))
+
+ def gitInfoBuilderA = givenABuilderReturning(gitInfoA, 1, GitProviderExpected.CI_PROVIDER, GitProviderDiscrepant.CI_PROVIDER)
+ def gitInfoBuilderB = givenABuilderReturning(gitInfoB, 2, GitProviderExpected.LOCAL_GIT, GitProviderDiscrepant.LOCAL_GIT)
+ def gitInfoBuilderC = givenABuilderReturning(gitInfoC, 3, GitProviderExpected.GIT_CLIENT, GitProviderDiscrepant.GIT_CLIENT)
+
+ def gitInfoProvider = new GitInfoProvider()
+ gitInfoProvider.registerGitInfoBuilder(gitInfoBuilderA)
+ gitInfoProvider.registerGitInfoBuilder(gitInfoBuilderB)
+ gitInfoProvider.registerGitInfoBuilder(gitInfoBuilderC)
+
+ when:
+ gitInfoProvider.getGitInfo(REPO_PATH)
+
+ then:
+ 1 * metricCollector.add(CiVisibilityCountMetric.GIT_COMMIT_SHA_MATCH, 1, GitShaMatch.FALSE)
+ 1 * metricCollector.add(CiVisibilityCountMetric.GIT_COMMIT_SHA_DISCREPANCY, 1, GitProviderExpected.CI_PROVIDER, GitProviderDiscrepant.LOCAL_GIT, GitShaDiscrepancyType.COMMIT_DISCREPANCY)
+ 1 * metricCollector.add(CiVisibilityCountMetric.GIT_COMMIT_SHA_DISCREPANCY, 1, GitProviderExpected.CI_PROVIDER, GitProviderDiscrepant.GIT_CLIENT, GitShaDiscrepancyType.REPOSITORY_DISCREPANCY)
+ }
+
+ def "test adds correct telemetry metrics when no SHA discrepancies are found"() {
+ setup:
+ def metricCollector = Mock(CiVisibilityMetricCollector)
+ InstrumentationBridge.registerMetricCollector(metricCollector)
+
+ def gitInfoA = new GitInfo("repoUrlA", null, null,
+ new CommitInfo("shaA",
+ PersonInfo.NOOP,
+ PersonInfo.NOOP,
+ "message"
+ ))
+ def gitInfoB = new GitInfo("repoUrlA", null, null,
+ new CommitInfo("shaA",
+ new PersonInfo("author name", "author email", "author date"),
+ new PersonInfo("committer name", "committer email", "committer date"),
+ "message"
+ ))
+
+ def gitInfoBuilderA = givenABuilderReturning(gitInfoA, 1, GitProviderExpected.CI_PROVIDER, GitProviderDiscrepant.CI_PROVIDER)
+ def gitInfoBuilderB = givenABuilderReturning(gitInfoB, 2, GitProviderExpected.LOCAL_GIT, GitProviderDiscrepant.LOCAL_GIT)
+
+ def gitInfoProvider = new GitInfoProvider()
+ gitInfoProvider.registerGitInfoBuilder(gitInfoBuilderA)
+ gitInfoProvider.registerGitInfoBuilder(gitInfoBuilderB)
+
+ when:
+ gitInfoProvider.getGitInfo(REPO_PATH)
+
+ then:
+ 1 * metricCollector.add(CiVisibilityCountMetric.GIT_COMMIT_SHA_MATCH, 1, GitShaMatch.TRUE)
+ 0 * metricCollector.add(CiVisibilityCountMetric.GIT_COMMIT_SHA_DISCREPANCY, *_)
+ }
+
def "test ignores remote URLs starting with file protocol"() {
setup:
def gitInfoBuilderA = givenABuilderReturning(
@@ -264,9 +346,15 @@ class GitInfoProviderTest extends Specification {
}
private GitInfoBuilder givenABuilderReturning(GitInfo gitInfo, int order) {
+ givenABuilderReturning(gitInfo, order, GitProviderExpected.USER_SUPPLIED, GitProviderDiscrepant.USER_SUPPLIED)
+ }
+
+ private GitInfoBuilder givenABuilderReturning(GitInfo gitInfo, int order, GitProviderExpected expected, GitProviderDiscrepant discrepant) {
def gitInfoBuilder = Stub(GitInfoBuilder)
gitInfoBuilder.build(REPO_PATH) >> gitInfo
gitInfoBuilder.order() >> order
+ gitInfoBuilder.providerAsExpected() >> expected
+ gitInfoBuilder.providerAsDiscrepant() >> discrepant
gitInfoBuilder
}
}
diff --git a/utils/socket-utils/src/main/java17/datadog/common/socket/TunnelingJdkSocket.java b/utils/socket-utils/src/main/java17/datadog/common/socket/TunnelingJdkSocket.java
index 063cd64c740..4037252ede4 100644
--- a/utils/socket-utils/src/main/java17/datadog/common/socket/TunnelingJdkSocket.java
+++ b/utils/socket-utils/src/main/java17/datadog/common/socket/TunnelingJdkSocket.java
@@ -29,6 +29,7 @@ final class TunnelingJdkSocket extends Socket {
private InetSocketAddress inetSocketAddress;
private SocketChannel unixSocketChannel;
+ private Selector selector;
private int timeout;
private boolean shutIn;
@@ -90,6 +91,9 @@ public synchronized int getSoTimeout() throws SocketException {
@Override
public void connect(final SocketAddress endpoint) throws IOException {
+ if (endpoint == null) {
+ throw new IllegalArgumentException("Endpoint cannot be null");
+ }
if (isClosed()) {
throw new SocketException("Socket is closed");
}
@@ -105,6 +109,12 @@ public void connect(final SocketAddress endpoint) throws IOException {
// https://github.com/jnr/jnr-unixsocket/blob/master/src/main/java/jnr/unixsocket/UnixSocket.java#L89-L97
@Override
public void connect(final SocketAddress endpoint, final int timeout) throws IOException {
+ if (endpoint == null) {
+ throw new IllegalArgumentException("Endpoint cannot be null");
+ }
+ if (timeout < 0) {
+ throw new IllegalArgumentException("Timeout cannot be negative");
+ }
if (isClosed()) {
throw new SocketException("Socket is closed");
}
@@ -122,17 +132,19 @@ public SocketChannel getChannel() {
@Override
public void setSendBufferSize(int size) throws SocketException {
+ if (size <= 0) {
+ throw new IllegalArgumentException("Invalid send buffer size");
+ }
if (isClosed()) {
throw new SocketException("Socket is closed");
}
- if (size < 0) {
- throw new IllegalArgumentException("Invalid send buffer size");
- }
+ sendBufferSize = size;
try {
unixSocketChannel.setOption(java.net.StandardSocketOptions.SO_SNDBUF, size);
- sendBufferSize = size;
} catch (IOException e) {
- throw new SocketException("Failed to set send buffer size");
+ SocketException se = new SocketException("Failed to set send buffer size socket option");
+ se.initCause(e);
+ throw se;
}
}
@@ -149,17 +161,19 @@ public int getSendBufferSize() throws SocketException {
@Override
public void setReceiveBufferSize(int size) throws SocketException {
+ if (size <= 0) {
+ throw new IllegalArgumentException("Invalid receive buffer size");
+ }
if (isClosed()) {
throw new SocketException("Socket is closed");
}
- if (size < 0) {
- throw new IllegalArgumentException("Invalid receive buffer size");
- }
+ receiveBufferSize = size;
try {
unixSocketChannel.setOption(java.net.StandardSocketOptions.SO_RCVBUF, size);
- receiveBufferSize = size;
} catch (IOException e) {
- throw new SocketException("Failed to set receive buffer size");
+ SocketException se = new SocketException("Failed to set receive buffer size socket option");
+ se.initCause(e);
+ throw se;
}
}
@@ -196,14 +210,14 @@ public InputStream getInputStream() throws IOException {
throw new SocketException("Socket input is shutdown");
}
+ if (selector == null) {
+ selector = Selector.open();
+ unixSocketChannel.configureBlocking(false);
+ unixSocketChannel.register(selector, SelectionKey.OP_READ);
+ }
+
return new InputStream() {
private final ByteBuffer buffer = ByteBuffer.allocate(getStreamBufferSize());
- private final Selector selector = Selector.open();
-
- {
- unixSocketChannel.configureBlocking(false);
- unixSocketChannel.register(selector, SelectionKey.OP_READ);
- }
@Override
public int read() throws IOException {
@@ -213,6 +227,9 @@ public int read() throws IOException {
@Override
public int read(byte[] b, int off, int len) throws IOException {
+ if (isInputShutdown()) {
+ return -1;
+ }
buffer.clear();
int readyChannels = selector.select(timeout);
@@ -241,7 +258,7 @@ public int read(byte[] b, int off, int len) throws IOException {
@Override
public void close() throws IOException {
- selector.close();
+ TunnelingJdkSocket.this.close();
}
};
}
@@ -254,7 +271,7 @@ public OutputStream getOutputStream() throws IOException {
if (!isConnected()) {
throw new SocketException("Socket is not connected");
}
- if (isInputShutdown()) {
+ if (isOutputShutdown()) {
throw new SocketException("Socket output is shutdown");
}
@@ -267,12 +284,19 @@ public void write(int b) throws IOException {
@Override
public void write(byte[] b, int off, int len) throws IOException {
+ if (isOutputShutdown()) {
+ throw new IOException("Stream closed");
+ }
ByteBuffer buffer = ByteBuffer.wrap(b, off, len);
-
while (buffer.hasRemaining()) {
unixSocketChannel.write(buffer);
}
}
+
+ @Override
+ public void close() throws IOException {
+ TunnelingJdkSocket.this.close();
+ }
};
}
@@ -308,6 +332,9 @@ public void shutdownOutput() throws IOException {
@Override
public InetAddress getInetAddress() {
+ if (!isConnected()) {
+ return null;
+ }
return inetSocketAddress.getAddress();
}
@@ -316,8 +343,31 @@ public void close() throws IOException {
if (isClosed()) {
return;
}
- if (null != unixSocketChannel) {
- unixSocketChannel.close();
+ // Ignore possible exceptions so that we continue closing the socket
+ try {
+ if (!isInputShutdown()) {
+ shutdownInput();
+ }
+ } catch (IOException e) {
+ }
+ try {
+ if (!isOutputShutdown()) {
+ shutdownOutput();
+ }
+ } catch (IOException e) {
+ }
+ try {
+ if (selector != null) {
+ selector.close();
+ selector = null;
+ }
+ } catch (IOException e) {
+ }
+ try {
+ if (unixSocketChannel != null) {
+ unixSocketChannel.close();
+ }
+ } catch (IOException e) {
}
closed = true;
}
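The socket rework centers on two ideas: a single Selector field shared by every InputStream handed out (the old code opened a fresh Selector per getInputStream() call and only closed it with the stream, leaking file descriptors), and a close() that unwinds input, output, selector, and channel while swallowing intermediate IOExceptions so an early failure cannot keep the later resources open. A stripped-down sketch of the lazy shared-selector part, assuming an already connected SocketChannel; this is not the full TunnelingJdkSocket:

import java.io.IOException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;

public class SharedSelectorSketch {
  private final SocketChannel channel;
  private Selector selector; // lazily created once, reused by every stream

  SharedSelectorSketch(SocketChannel channel) {
    this.channel = channel;
  }

  synchronized Selector readSelector() throws IOException {
    if (selector == null) {
      selector = Selector.open();
      channel.configureBlocking(false);
      channel.register(selector, SelectionKey.OP_READ);
    }
    return selector; // a second stream reuses the same selector and FD
  }

  synchronized void close() throws IOException {
    // Close in order, ignoring intermediate failures like the diff does,
    // so the channel is released even if closing the selector throws.
    try {
      if (selector != null) {
        selector.close();
        selector = null;
      }
    } catch (IOException ignored) {
    }
    channel.close();
  }
}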
diff --git a/utils/socket-utils/src/test/java/datadog/common/socket/TunnelingJdkSocketTest.java b/utils/socket-utils/src/test/java/datadog/common/socket/TunnelingJdkSocketTest.java
index 74cca0d4bd1..76362accb1e 100644
--- a/utils/socket-utils/src/test/java/datadog/common/socket/TunnelingJdkSocketTest.java
+++ b/utils/socket-utils/src/test/java/datadog/common/socket/TunnelingJdkSocketTest.java
@@ -6,6 +6,8 @@
import datadog.trace.api.Config;
import java.io.IOException;
import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.management.ManagementFactory;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.net.StandardProtocolFamily;
@@ -23,7 +25,7 @@ public class TunnelingJdkSocketTest {
private static final AtomicBoolean isServerRunning = new AtomicBoolean(false);
@Test
- public void testTimeout() throws Exception {
+ public void testSocketConnectAndClose() throws Exception {
if (!Config.get().isJdkSocketEnabled()) {
System.out.println(
"TunnelingJdkSocket usage is disabled. Enable it by setting the property 'JDK_SOCKET_ENABLED' to 'true'.");
@@ -33,7 +35,104 @@ public void testTimeout() throws Exception {
Path socketPath = getSocketPath();
UnixDomainSocketAddress socketAddress = UnixDomainSocketAddress.of(socketPath);
startServer(socketAddress);
- TunnelingJdkSocket clientSocket = createClient(socketPath);
+ TunnelingJdkSocket clientSocket = new TunnelingJdkSocket(socketPath);
+
+ assertFalse(clientSocket.isConnected());
+ assertFalse(clientSocket.isClosed());
+
+ clientSocket.connect(new InetSocketAddress("localhost", 0));
+ InputStream inputStream = clientSocket.getInputStream();
+ OutputStream outputStream = clientSocket.getOutputStream();
+
+ assertTrue(clientSocket.isConnected());
+ assertFalse(clientSocket.isClosed());
+ assertFalse(clientSocket.isInputShutdown());
+ assertFalse(clientSocket.isOutputShutdown());
+ assertThrows(
+ SocketException.class, () -> clientSocket.connect(new InetSocketAddress("localhost", 0)));
+
+ clientSocket.close();
+
+ assertTrue(clientSocket.isConnected());
+ assertTrue(clientSocket.isClosed());
+ assertTrue(clientSocket.isInputShutdown());
+ assertTrue(clientSocket.isOutputShutdown());
+ assertEquals(-1, inputStream.read());
+ assertThrows(IOException.class, () -> outputStream.write(1));
+ assertThrows(SocketException.class, () -> clientSocket.getInputStream());
+ assertThrows(SocketException.class, () -> clientSocket.getOutputStream());
+ clientSocket.close();
+
+ isServerRunning.set(false);
+ }
+
+ @Test
+ public void testInputStreamClose() throws Exception {
+ if (!Config.get().isJdkSocketEnabled()) {
+ System.out.println(
+ "TunnelingJdkSocket usage is disabled. Enable it by setting the property 'JDK_SOCKET_ENABLED' to 'true'.");
+ return;
+ }
+
+ TunnelingJdkSocket clientSocket = createClient();
+ InputStream inputStream = clientSocket.getInputStream();
+ OutputStream outputStream = clientSocket.getOutputStream();
+
+ assertFalse(clientSocket.isClosed());
+ assertFalse(clientSocket.isInputShutdown());
+ assertFalse(clientSocket.isOutputShutdown());
+
+ inputStream.close();
+
+ assertTrue(clientSocket.isClosed());
+ assertTrue(clientSocket.isInputShutdown());
+ assertTrue(clientSocket.isOutputShutdown());
+ assertEquals(-1, inputStream.read());
+ assertThrows(IOException.class, () -> outputStream.write(1));
+ assertThrows(SocketException.class, () -> clientSocket.getInputStream());
+ assertThrows(SocketException.class, () -> clientSocket.getOutputStream());
+
+ isServerRunning.set(false);
+ }
+
+ @Test
+ public void testOutputStreamClose() throws Exception {
+ if (!Config.get().isJdkSocketEnabled()) {
+ System.out.println(
+ "TunnelingJdkSocket usage is disabled. Enable it by setting the property 'JDK_SOCKET_ENABLED' to 'true'.");
+ return;
+ }
+
+ TunnelingJdkSocket clientSocket = createClient();
+ InputStream inputStream = clientSocket.getInputStream();
+ OutputStream outputStream = clientSocket.getOutputStream();
+
+ assertFalse(clientSocket.isClosed());
+ assertFalse(clientSocket.isInputShutdown());
+ assertFalse(clientSocket.isOutputShutdown());
+
+ outputStream.close();
+
+ assertTrue(clientSocket.isClosed());
+ assertTrue(clientSocket.isInputShutdown());
+ assertTrue(clientSocket.isOutputShutdown());
+ assertEquals(-1, inputStream.read());
+ assertThrows(IOException.class, () -> outputStream.write(1));
+ assertThrows(SocketException.class, () -> clientSocket.getInputStream());
+ assertThrows(SocketException.class, () -> clientSocket.getOutputStream());
+
+ isServerRunning.set(false);
+ }
+
+ @Test
+ public void testTimeout() throws Exception {
+ if (!Config.get().isJdkSocketEnabled()) {
+ System.out.println(
+ "TunnelingJdkSocket usage is disabled. Enable it by setting the property 'JDK_SOCKET_ENABLED' to 'true'.");
+ return;
+ }
+
+ TunnelingJdkSocket clientSocket = createClient();
InputStream inputStream = clientSocket.getInputStream();
int testTimeout = 1000;
@@ -83,10 +182,7 @@ public void testBufferSizes() throws Exception {
return;
}
- Path socketPath = getSocketPath();
- UnixDomainSocketAddress socketAddress = UnixDomainSocketAddress.of(socketPath);
- startServer(socketAddress);
- TunnelingJdkSocket clientSocket = createClient(socketPath);
+ TunnelingJdkSocket clientSocket = createClient();
assertEquals(TunnelingJdkSocket.DEFAULT_BUFFER_SIZE, clientSocket.getSendBufferSize());
assertEquals(TunnelingJdkSocket.DEFAULT_BUFFER_SIZE, clientSocket.getReceiveBufferSize());
@@ -119,11 +215,48 @@ public void testBufferSizes() throws Exception {
isServerRunning.set(false);
}
- private Path getSocketPath() throws IOException {
- Path socketPath = Files.createTempFile("testSocket", null);
- Files.delete(socketPath);
- socketPath.toFile().deleteOnExit();
- return socketPath;
+ @Test
+ public void testFileDescriptorLeak() throws Exception {
+ if (!Config.get().isJdkSocketEnabled()) {
+ System.out.println(
+ "TunnelingJdkSocket usage is disabled. Enable it by setting the property 'JDK_SOCKET_ENABLED' to 'true'.");
+ return;
+ }
+ long initialCount = getFileDescriptorCount();
+
+ TunnelingJdkSocket clientSocket = createClient();
+
+ for (int i = 0; i < 100; i++) {
+ InputStream inputStream = clientSocket.getInputStream();
+ long currentCount = getFileDescriptorCount();
+ assertTrue(currentCount <= initialCount + 7);
+ }
+
+ clientSocket.close();
+ isServerRunning.set(false);
+
+ long finalCount = getFileDescriptorCount();
+ assertTrue(finalCount <= initialCount + 3);
+ }
+
+ private long getFileDescriptorCount() {
+ try {
+ Process process = Runtime.getRuntime().exec("lsof -p " + getPid());
+ int count = 0;
+ try (java.io.BufferedReader reader =
+ new java.io.BufferedReader(new java.io.InputStreamReader(process.getInputStream()))) {
+ while (reader.readLine() != null) {
+ count++;
+ }
+ }
+ return count;
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to get file descriptor count", e);
+ }
+ }
+
+ private String getPid() {
+ return ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
}
private static void startServer(UnixDomainSocketAddress socketAddress) {
@@ -159,7 +292,17 @@ private static void startServer(UnixDomainSocketAddress socketAddress) {
}
}
- private TunnelingJdkSocket createClient(Path socketPath) throws IOException {
+ private Path getSocketPath() throws IOException {
+ Path socketPath = Files.createTempFile("testSocket", null);
+ Files.delete(socketPath);
+ socketPath.toFile().deleteOnExit();
+ return socketPath;
+ }
+
+ private TunnelingJdkSocket createClient() throws IOException {
+ Path socketPath = getSocketPath();
+ UnixDomainSocketAddress socketAddress = UnixDomainSocketAddress.of(socketPath);
+ startServer(socketAddress);
TunnelingJdkSocket clientSocket = new TunnelingJdkSocket(socketPath);
clientSocket.connect(new InetSocketAddress("localhost", 0));
return clientSocket;
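One note on the leak test: getFileDescriptorCount() shells out to lsof, which is slow and Unix-only, and the loose thresholds (+7, +3) absorb the descriptors the lsof child itself consumes. On Linux, reading /proc/self/fd avoids spawning a process at all; a possible alternative, not part of this change:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

public class FdCountSketch {
  // Linux-only alternative to lsof: each entry in /proc/self/fd is one open
  // descriptor of the current process (the directory stream itself briefly
  // adds one while counting).
  static long getOpenFileDescriptorCount() throws IOException {
    Path fdDir = Paths.get("/proc/self/fd");
    try (Stream<Path> entries = Files.list(fdDir)) {
      return entries.count();
    }
  }

  public static void main(String[] args) throws IOException {
    System.out.println("open fds: " + getOpenFileDescriptorCount());
  }
}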