diff --git a/.ci/.matrix_exclude.yml b/.ci/.matrix_exclude.yml index eaf0fa82b..5eb71a09f 100644 --- a/.ci/.matrix_exclude.yml +++ b/.ci/.matrix_exclude.yml @@ -184,8 +184,12 @@ exclude: # asyncpg - VERSION: pypy-3 FRAMEWORK: asyncpg-newest + - VERSION: pypy-3 + FRAMEWORK: asyncpg-0.28 - VERSION: python-3.6 FRAMEWORK: asyncpg-newest + - VERSION: python-3.6 + FRAMEWORK: asyncpg-0.28 # sanic - VERSION: pypy-3 FRAMEWORK: sanic-newest @@ -259,23 +263,9 @@ exclude: - VERSION: python-3.7 FRAMEWORK: celery-4-flask-1.0 # TODO py3.12 - - VERSION: python-3.12 - FRAMEWORK: pymssql-newest # no wheels available yet - - VERSION: python-3.12 - FRAMEWORK: aiohttp-newest # no wheels available yet - - VERSION: python-3.12 - FRAMEWORK: elasticsearch-7 # relies on aiohttp - - VERSION: python-3.12 - FRAMEWORK: elasticsearch-8 # relies on aiohttp - - VERSION: python-3.12 - FRAMEWORK: aiobotocore-newest # relies on aiohttp - VERSION: python-3.12 FRAMEWORK: sanic-20.12 # no wheels available yet - - VERSION: python-3.12 - FRAMEWORK: sanic-newest # no wheels available yet - VERSION: python-3.12 FRAMEWORK: kafka-python-newest # https://github.com/dpkp/kafka-python/pull/2376 - - VERSION: python-3.12 - FRAMEWORK: pyodbc-newest # error on wheel - VERSION: python-3.12 FRAMEWORK: cassandra-newest # c extension issue diff --git a/.ci/updatecli.d/update-gherkin-specs.yml b/.ci/updatecli.d/update-gherkin-specs.yml deleted file mode 100644 index 8deb269fc..000000000 --- a/.ci/updatecli.d/update-gherkin-specs.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: update-gherkin-specs -pipelineid: update-gherkin-specs -title: synchronize gherkin specs - -scms: - default: - kind: github - spec: - user: '{{ requiredEnv "GIT_USER" }}' - email: '{{ requiredEnv "GIT_EMAIL" }}' - owner: elastic - repository: apm-agent-python - token: '{{ requiredEnv "GITHUB_TOKEN" }}' - username: '{{ requiredEnv "GIT_USER" }}' - branch: main - -sources: - sha: - kind: file - spec: - file: 'https://github.com/elastic/apm/commit/main.patch' - matchpattern: "^From\\s([0-9a-f]{40})\\s" - transformers: - - findsubmatch: - pattern: "[0-9a-f]{40}" - - api_key.feature: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/gherkin-specs/api_key.feature - azure_app_service_metadata.feature: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/gherkin-specs/azure_app_service_metadata.feature - azure_functions_metadata.feature: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/gherkin-specs/azure_functions_metadata.feature - otel_bridge.feature: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/gherkin-specs/otel_bridge.feature - outcome.feature: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/gherkin-specs/outcome.feature - user_agent.feature: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/gherkin-specs/user_agent.feature - -actions: - pr: - kind: "github/pullrequest" - scmid: default - title: '[Automation] Update Gherkin specs' - spec: - automerge: false - draft: false - labels: - - "automation" - description: |- - ### What - APM agent Gherkin specs automatic sync - ### Why - *Changeset* - * https://github.com/elastic/apm/commit/{{ source "sha" }} - -targets: - api_key.feature: - name: api_key.feature - scmid: default - sourceid: api_key.feature - kind: file - spec: - file: tests/bdd/features/api_key.feature - 
forcecreate: true - azure_app_service_metadata.feature: - name: azure_app_service_metadata.feature - scmid: default - sourceid: azure_app_service_metadata.feature - kind: file - spec: - file: tests/bdd/features/azure_app_service_metadata.feature - forcecreate: true - azure_functions_metadata.feature: - name: azure_functions_metadata.feature - scmid: default - sourceid: azure_functions_metadata.feature - kind: file - spec: - file: tests/bdd/features/azure_functions_metadata.feature - forcecreate: true - otel_bridge.feature: - name: otel_bridge.feature - scmid: default - sourceid: otel_bridge.feature - kind: file - spec: - file: tests/bdd/features/otel_bridge.feature - forcecreate: true - outcome.feature: - name: outcome.feature - scmid: default - sourceid: outcome.feature - kind: file - spec: - file: tests/bdd/features/outcome.feature - forcecreate: true - user_agent.feature: - name: user_agent.feature - scmid: default - sourceid: user_agent.feature - kind: file - spec: - file: tests/bdd/features/user_agent.feature - forcecreate: true diff --git a/.ci/updatecli.d/update-json-specs.yml b/.ci/updatecli.d/update-json-specs.yml deleted file mode 100644 index 13d25c834..000000000 --- a/.ci/updatecli.d/update-json-specs.yml +++ /dev/null @@ -1,122 +0,0 @@ -name: update-json-specs -pipelineid: update-json-specs -title: synchronize json specs - -scms: - default: - kind: github - spec: - user: '{{ requiredEnv "GIT_USER" }}' - email: '{{ requiredEnv "GIT_EMAIL" }}' - owner: elastic - repository: apm-agent-python - token: '{{ requiredEnv "GITHUB_TOKEN" }}' - username: '{{ requiredEnv "GIT_USER" }}' - branch: main - -sources: - sha: - kind: file - spec: - file: 'https://github.com/elastic/apm/commit/main.patch' - matchpattern: "^From\\s([0-9a-f]{40})\\s" - transformers: - - findsubmatch: - pattern: "[0-9a-f]{40}" - - container_metadata_discovery.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/container_metadata_discovery.json - service_resource_inference.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/service_resource_inference.json - span_types.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/span_types.json - sql_signature_examples.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/sql_signature_examples.json - sql_token_examples.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/sql_token_examples.json - w3c_distributed_tracing.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/w3c_distributed_tracing.json - wildcard_matcher_tests.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm/main/tests/agents/json-specs/wildcard_matcher_tests.json - -actions: - pr: - kind: "github/pullrequest" - scmid: default - title: '[Automation] Update JSON specs' - spec: - automerge: false - draft: false - labels: - - "automation" - description: |- - ### What - APM agent specs automatic sync - ### Why - *Changeset* - * https://github.com/elastic/apm/commit/{{ source "sha" }} - -targets: - container_metadata_discovery.json: - name: container_metadata_discovery.json - scmid: default - sourceid: container_metadata_discovery.json - kind: file - spec: - file: tests/upstream/json-specs/container_metadata_discovery.json - 
service_resource_inference.json: - name: service_resource_inference.json - scmid: default - sourceid: service_resource_inference.json - kind: file - spec: - file: tests/upstream/json-specs/service_resource_inference.json - span_types.json: - name: span_types.json - scmid: default - sourceid: span_types.json - kind: file - spec: - file: tests/upstream/json-specs/span_types.json - sql_signature_examples.json: - name: sql_signature_examples.json - scmid: default - sourceid: sql_signature_examples.json - kind: file - spec: - file: tests/upstream/json-specs/sql_signature_examples.json - sql_token_examples.json: - name: sql_token_examples.json - scmid: default - sourceid: sql_token_examples.json - kind: file - spec: - file: tests/upstream/json-specs/sql_token_examples.json - w3c_distributed_tracing.json: - name: w3c_distributed_tracing.json - scmid: default - sourceid: w3c_distributed_tracing.json - kind: file - spec: - file: tests/upstream/json-specs/w3c_distributed_tracing.json - wildcard_matcher_tests.json: - name: wildcard_matcher_tests.json - scmid: default - sourceid: wildcard_matcher_tests.json - kind: file - spec: - file: tests/upstream/json-specs/wildcard_matcher_tests.json diff --git a/.ci/updatecli.d/update-specs.yml b/.ci/updatecli.d/update-specs.yml deleted file mode 100644 index ab3bd34c7..000000000 --- a/.ci/updatecli.d/update-specs.yml +++ /dev/null @@ -1,104 +0,0 @@ -name: update-specs -pipelineid: update-schema-specs -title: synchronize schema specs - -scms: - default: - kind: github - spec: - user: '{{ requiredEnv "GIT_USER" }}' - email: '{{ requiredEnv "GIT_EMAIL" }}' - owner: elastic - repository: apm-agent-python - token: '{{ requiredEnv "GITHUB_TOKEN" }}' - username: '{{ requiredEnv "GIT_USER" }}' - branch: main - -sources: - sha: - kind: file - spec: - file: 'https://github.com/elastic/apm-data/commit/main.patch' - matchpattern: "^From\\s([0-9a-f]{40})\\s" - transformers: - - findsubmatch: - pattern: "[0-9a-f]{40}" - error.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm-data/main/input/elasticapm/docs/spec/v2/error.json - metadata.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm-data/main/input/elasticapm/docs/spec/v2/metadata.json - metricset.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm-data/main/input/elasticapm/docs/spec/v2/metricset.json - span.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm-data/main/input/elasticapm/docs/spec/v2/span.json - transaction.json: - kind: file - spec: - file: https://raw.githubusercontent.com/elastic/apm-data/main/input/elasticapm/docs/spec/v2/transaction.json - -actions: - pr: - kind: "github/pullrequest" - scmid: default - title: '[Automation] Update JSON schema specs' - spec: - automerge: false - draft: false - labels: - - "automation" - description: |- - ### What - APM agent json schema automatic sync - ### Why - *Changeset* - * https://github.com/elastic/apm-data/commit/{{ source "sha" }} - -targets: - error.json: - name: error.json - scmid: default - sourceid: error.json - kind: file - spec: - file: tests/upstream/json-specs/error.json - forcecreate: true - metadata.json: - name: metadata.json - scmid: default - sourceid: metadata.json - kind: file - spec: - file: tests/upstream/json-specs/metadata.json - forcecreate: true - metricset.json: - name: metricset.json - scmid: default - sourceid: metricset.json - kind: file - spec: - file: tests/upstream/json-specs/metricset.json - forcecreate: true - 
span.json: - name: span.json - scmid: default - sourceid: span.json - kind: file - spec: - file: tests/upstream/json-specs/span.json - forcecreate: true - transaction.json: - name: transaction.json - scmid: default - sourceid: transaction.json - kind: file - spec: - file: tests/upstream/json-specs/transaction.json - forcecreate: true diff --git a/.ci/updatecli/updatecli.d/update-gherkin-specs.yml b/.ci/updatecli/updatecli.d/update-gherkin-specs.yml new file mode 100644 index 000000000..f12ece861 --- /dev/null +++ b/.ci/updatecli/updatecli.d/update-gherkin-specs.yml @@ -0,0 +1,84 @@ +name: update-gherkin-specs +pipelineid: update-gherkin-specs + +scms: + default: + kind: github + spec: + user: '{{ requiredEnv "GIT_USER" }}' + email: '{{ requiredEnv "GIT_EMAIL" }}' + owner: "{{ .github.owner }}" + repository: "{{ .github.repository }}" + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + username: '{{ requiredEnv "GIT_USER" }}' + branch: "{{ .github.branch }}" + apm: + kind: github + spec: + user: '{{ requiredEnv "GIT_USER" }}' + email: '{{ requiredEnv "GIT_EMAIL" }}' + owner: "{{ .github.owner }}" + repository: "{{ .github.apm_repository }}" + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + username: '{{ requiredEnv "GIT_USER" }}' + branch: "{{ .github.branch }}" + +sources: + sha: + kind: file + spec: + file: 'https://github.com/{{ .github.owner }}/{{ .github.apm_repository }}/commit/{{ .github.branch }}.patch' + matchpattern: "^From\\s([0-9a-f]{40})\\s" + transformers: + - findsubmatch: + pattern: "[0-9a-f]{40}" + pull_request: + kind: shell + dependson: + - sha + spec: + command: gh api /repos/{{ .github.owner }}/{{ .github.apm_repository }}/commits/{{ source "sha" }}/pulls --jq '.[].html_url' + environments: + - name: GITHUB_TOKEN + - name: PATH + agents-gherkin-specs-tarball: + kind: shell + scmid: apm + dependson: + - sha + spec: + command: tar cvzf {{ requiredEnv "GITHUB_WORKSPACE" }}/gherkin-specs.tgz . + environments: + - name: PATH + workdir: "{{ .specs.apm_gherkin_path }}" + +actions: + pr: + kind: "github/pullrequest" + scmid: default + spec: + automerge: false + draft: false + labels: + - "automation" + description: |- + ### What + APM agent Gherkin specs automatic sync + + ### Why + *Changeset* + * {{ source "pull_request" }} + * https://github.com/elastic/apm/commit/{{ source "sha" }} + title: '[Automation] Update Gherkin specs' + +targets: + agent-gherkin-specs: + name: APM agent gherkin specs {{ source "sha" }} + scmid: default + disablesourceinput: true + kind: shell + spec: + # git diff helps to print what it changed, If it is empty, then updatecli report a success with no changes applied. 
+ # See https://www.updatecli.io/docs/plugins/resource/shell/#_shell_target + command: 'tar -xzf {{ requiredEnv "GITHUB_WORKSPACE" }}/gherkin-specs.tgz && git --no-pager diff' + workdir: "{{ .apm_agent.gherkin_specs_path }}" diff --git a/.ci/updatecli/updatecli.d/update-json-specs.yml b/.ci/updatecli/updatecli.d/update-json-specs.yml new file mode 100644 index 000000000..e05aaecdb --- /dev/null +++ b/.ci/updatecli/updatecli.d/update-json-specs.yml @@ -0,0 +1,84 @@ +name: update-json-specs +pipelineid: update-json-specs + +scms: + default: + kind: github + spec: + user: '{{ requiredEnv "GIT_USER" }}' + email: '{{ requiredEnv "GIT_EMAIL" }}' + owner: "{{ .github.owner }}" + repository: "{{ .github.repository }}" + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + username: '{{ requiredEnv "GIT_USER" }}' + branch: "{{ .github.branch }}" + apm: + kind: github + spec: + user: '{{ requiredEnv "GIT_USER" }}' + email: '{{ requiredEnv "GIT_EMAIL" }}' + owner: "{{ .github.owner }}" + repository: "{{ .github.apm_repository }}" + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + username: '{{ requiredEnv "GIT_USER" }}' + branch: "{{ .github.branch }}" + +sources: + sha: + kind: file + spec: + file: 'https://github.com/{{ .github.owner }}/{{ .github.apm_repository }}/commit/{{ .github.branch }}.patch' + matchpattern: "^From\\s([0-9a-f]{40})\\s" + transformers: + - findsubmatch: + pattern: "[0-9a-f]{40}" + pull_request: + kind: shell + dependson: + - sha + spec: + command: gh api /repos/{{ .github.owner }}/{{ .github.apm_repository }}/commits/{{ source "sha" }}/pulls --jq '.[].html_url' + environments: + - name: GITHUB_TOKEN + - name: PATH + agents-json-specs-tarball: + kind: shell + scmid: apm + dependson: + - sha + spec: + command: tar cvzf {{ requiredEnv "GITHUB_WORKSPACE" }}/json-specs.tgz . + environments: + - name: PATH + workdir: "{{ .specs.apm_json_path }}" + +actions: + pr: + kind: "github/pullrequest" + scmid: default + spec: + automerge: false + draft: false + labels: + - "automation" + description: |- + ### What + APM agent specs automatic sync + + ### Why + *Changeset* + * {{ source "pull_request" }} + * https://github.com/{{ .github.owner }}/{{ .github.apm_repository }}/commit/{{ source "sha" }} + title: '[Automation] Update JSON specs' + +targets: + agent-json-specs: + name: APM agent json specs {{ source "sha" }} + scmid: default + disablesourceinput: true + kind: shell + spec: + # git diff helps to print what it changed, If it is empty, then updatecli report a success with no changes applied. 
+ # See https://www.updatecli.io/docs/plugins/resource/shell/#_shell_target + command: 'tar -xzf {{ requiredEnv "GITHUB_WORKSPACE" }}/json-specs.tgz && git --no-pager diff' + workdir: "{{ .apm_agent.json_specs_path }}" diff --git a/.ci/updatecli/updatecli.d/update-specs.yml b/.ci/updatecli/updatecli.d/update-specs.yml new file mode 100644 index 000000000..554140da2 --- /dev/null +++ b/.ci/updatecli/updatecli.d/update-specs.yml @@ -0,0 +1,86 @@ +name: update-specs +pipelineid: update-schema-specs + +scms: + default: + kind: github + spec: + user: '{{ requiredEnv "GIT_USER" }}' + email: '{{ requiredEnv "GIT_EMAIL" }}' + owner: "{{ .github.owner }}" + repository: "{{ .github.repository }}" + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + username: '{{ requiredEnv "GIT_USER" }}' + branch: "{{ .github.branch }}" + + apm-data: + kind: github + spec: + user: '{{ requiredEnv "GIT_USER" }}' + email: '{{ requiredEnv "GIT_EMAIL" }}' + owner: "{{ .github.owner }}" + repository: "{{ .github.apm_data_repository }}" + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + username: '{{ requiredEnv "GIT_USER" }}' + branch: "{{ .github.branch }}" + +sources: + sha: + kind: file + spec: + file: 'https://github.com/{{ .github.owner }}/{{ .github.apm_data_repository }}/commit/{{ .github.branch }}.patch' + matchpattern: "^From\\s([0-9a-f]{40})\\s" + transformers: + - findsubmatch: + pattern: "[0-9a-f]{40}" + pull_request: + kind: shell + dependson: + - sha + spec: + command: gh api /repos/{{ .github.owner }}/{{ .github.apm_data_repository }}/commits/{{ source "sha" }}/pulls --jq '.[].html_url' + environments: + - name: GITHUB_TOKEN + - name: PATH + agent-specs-tarball: + kind: shell + scmid: apm-data + dependson: + - sha + spec: + command: tar cvzf {{ requiredEnv "GITHUB_WORKSPACE" }}/json-schema.tgz . + environments: + - name: PATH + workdir: "{{ .specs.apm_data_path }}" + +actions: + pr: + kind: "github/pullrequest" + scmid: default + sourceid: sha + spec: + automerge: false + draft: false + labels: + - "automation" + description: |- + ### What + APM agent json server schema automatic sync + + ### Why + *Changeset* + * {{ source "pull_request" }} + * https://github.com/{{ .github.owner }}/{{ .github.apm_data_repository }}/commit/{{ source "sha" }} + title: '[Automation] Update JSON server schema specs' + +targets: + agent-json-schema: + name: APM agent json server schema {{ source "sha" }} + scmid: default + disablesourceinput: true + kind: shell + spec: + # git diff helps to print what it changed, If it is empty, then updatecli report a success with no changes applied. 
+ # See https://www.updatecli.io/docs/plugins/resource/shell/#_shell_target + command: 'tar -xzf {{ requiredEnv "GITHUB_WORKSPACE" }}/json-schema.tgz && git --no-pager diff' + workdir: "{{ .apm_agent.server_schema_specs_path }}" diff --git a/.ci/updatecli/values.yml b/.ci/updatecli/values.yml new file mode 100644 index 000000000..b0b58d73e --- /dev/null +++ b/.ci/updatecli/values.yml @@ -0,0 +1,14 @@ +github: + owner: "elastic" + repository: "apm-agent-python" + apm_repository: "apm" + apm_data_repository: "apm-data" + branch: "main" +specs: + apm_data_path: "input/elasticapm/docs/spec/v2" + apm_json_path: "tests/agents/json-specs" + apm_gherkin_path: "tests/agents/gherkin-specs" +apm_agent: + gherkin_specs_path: "tests/bdd/features" + json_specs_path: "tests/upstream/json-specs" + server_schema_specs_path: "tests/upstream/json-specs" \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml index eb8155b22..eb1cff95b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,8 +8,24 @@ updates: # Check for updates once a week schedule: interval: "weekly" + day: "sunday" + time: "22:00" reviewers: - "elastic/apm-agent-python" ignore: - dependency-name: "urllib3" # ignore until lambda runtimes use OpenSSL 1.1.1+ versions: [">=2.0.0"] + + # GitHub actions + - package-ecosystem: "github-actions" + directory: "/" + reviewers: + - "elastic/observablt-ci" + schedule: + interval: "weekly" + day: "sunday" + time: "22:00" + groups: + github-actions: + patterns: + - "*" diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 3cdfe70f0..c224d62b8 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -39,6 +39,7 @@ Once a PR has been opened then there are two different ways you can trigger buil 1. Commit based 1. UI based, any Elasticians can force a build through the GitHub UI +1. PR review comment-based, any Elastic employees can force a full matrix test run through a PR review comment with the following syntax: `/test matrix`. 
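The `/test matrix` review command described above is handled by the new `matrix-command` workflow added further down, which only proceeds when the reviewer holds write or admin permission on the repository (and otherwise posts an explanatory comment on the PR). Below is a rough Python equivalent of that gate, for illustration only: the `requests` usage and token handling are assumptions, while the REST endpoint and the accepted permission levels mirror the workflow's `github-script` step.

```python
# Sketch of the permission gate behind "/test matrix" (assumes a token with
# repository read access is exported as GITHUB_TOKEN).
import os

import requests


def can_run_matrix_command(owner: str, repo: str, username: str) -> bool:
    """Return True if the user may trigger the command (write or admin access)."""
    resp = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/collaborators/{username}/permission",
        headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
        timeout=10,
    )
    resp.raise_for_status()
    permission = resp.json()["permission"]  # one of: admin, write, read, none
    return permission in ("write", "admin")


if __name__ == "__main__":
    print(can_run_matrix_command("elastic", "apm-agent-python", "octocat"))
```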
#### Branches diff --git a/.github/workflows/build-distribution.yml b/.github/workflows/build-distribution.yml index 986632acd..fd3e11ed7 100644 --- a/.github/workflows/build-distribution.yml +++ b/.github/workflows/build-distribution.yml @@ -7,8 +7,8 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" - name: Build lambda layer zip diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index df219658c..377caaa5c 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -17,7 +17,7 @@ jobs: configuration-path: .github/labeler-config.yml enable-versioned-regex: 0 - name: Check team membership for user - uses: elastic/get-user-teams-membership@v1.0.4 + uses: elastic/get-user-teams-membership@1.1.0 id: checkUserMember with: username: ${{ github.actor }} diff --git a/.github/workflows/matrix-command.yml b/.github/workflows/matrix-command.yml new file mode 100644 index 000000000..f2c32658f --- /dev/null +++ b/.github/workflows/matrix-command.yml @@ -0,0 +1,49 @@ +name: matrix-command + +on: + pull_request_review: + types: + - submitted + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + +permissions: + contents: read + +jobs: + command-validation: + if: startsWith(github.event.review.body, '/test matrix') + runs-on: ubuntu-latest + timeout-minutes: 5 + permissions: + pull-requests: write + steps: + - name: Is comment allowed? + uses: actions/github-script@v7 + with: + script: | + const actorPermission = (await github.rest.repos.getCollaboratorPermissionLevel({ + ...context.repo, + username: context.actor + })).data.permission + const isPermitted = ['write', 'admin'].includes(actorPermission) + if (!isPermitted) { + const errorMessage = 'Only users with write permission to the repository can run GitHub commands' + await github.rest.issues.createComment({ + ...context.repo, + issue_number: context.issue.number, + body: errorMessage, + }) + core.setFailed(errorMessage) + return + } + + test: + needs: + - command-validation + uses: ./.github/workflows/test.yml + with: + full-matrix: true + ref: ${{ github.event.pull_request.head.sha }} diff --git a/.github/workflows/opentelemetry.yml b/.github/workflows/opentelemetry.yml index ea858e655..84a6209ff 100644 --- a/.github/workflows/opentelemetry.yml +++ b/.github/workflows/opentelemetry.yml @@ -1,18 +1,16 @@ --- +# Look up results at https://ela.st/oblt-ci-cd-stats. +# There will be one service per GitHub repository, including the org name, and one Transaction per Workflow. 
name: OpenTelemetry Export Trace on: workflow_run: - workflows: - - pre-commit - - test - - test-reporter - - snapshoty - - release - - packages - - updatecli + workflows: [ "*" ] types: [completed] +permissions: + contents: read + jobs: otel-export-trace: runs-on: ubuntu-latest diff --git a/.github/workflows/packages.yml b/.github/workflows/packages.yml index 148110c7f..af485c455 100644 --- a/.github/workflows/packages.yml +++ b/.github/workflows/packages.yml @@ -17,8 +17,8 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install wheel @@ -28,7 +28,7 @@ jobs: - name: Building source distribution run: python setup.py sdist - name: Upload Packages - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: packages path: | diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index c2f7e71fc..65947d33b 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -9,6 +9,6 @@ jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 - - uses: pre-commit/action@v3.0.0 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 03a77ce47..a2a48b62b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,6 +11,8 @@ permissions: jobs: test: uses: ./.github/workflows/test.yml + with: + full-matrix: true packages: uses: ./.github/workflows/packages.yml @@ -24,13 +26,13 @@ jobs: permissions: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: actions/checkout@v4 + - uses: actions/download-artifact@v4 with: name: packages path: dist - name: Upload - uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 + uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf with: repository-url: https://upload.pypi.org/legacy/ @@ -42,8 +44,8 @@ jobs: - build-distribution runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: hashicorp/vault-action@v2.7.2 + - uses: actions/checkout@v4 + - uses: hashicorp/vault-action@v2.8.0 with: url: ${{ secrets.VAULT_ADDR }} method: approle @@ -63,7 +65,7 @@ jobs: VERSION=${VERSION//./-} ELASTIC_LAYER_NAME="elastic-apm-python-${VERSION}" .ci/publish-aws.sh - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: arn-file path: ".arn-file.md" @@ -74,7 +76,7 @@ jobs: - build-distribution runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: elastic/apm-pipeline-library/.github/actions/docker-login@current with: registry: docker.elastic.co @@ -115,8 +117,8 @@ jobs: - publish-lambda-layers runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: actions/checkout@v4 + - uses: actions/download-artifact@v4 with: name: arn-file - name: Create GitHub Draft Release diff --git a/.github/workflows/run-matrix.yml b/.github/workflows/run-matrix.yml index 811f68dd9..827212527 100644 --- a/.github/workflows/run-matrix.yml +++ b/.github/workflows/run-matrix.yml @@ -18,7 +18,7 @@ jobs: matrix: include: ${{ fromJSON(inputs.include) }} steps: - - uses: actions/checkout@v3 + - 
uses: actions/checkout@v4 - name: Run tests run: ./tests/scripts/docker/run_tests.sh ${{ matrix.version }} ${{ matrix.framework }} env: diff --git a/.github/workflows/snapshoty.yml b/.github/workflows/snapshoty.yml index 3f91e2213..49d1b3423 100644 --- a/.github/workflows/snapshoty.yml +++ b/.github/workflows/snapshoty.yml @@ -21,8 +21,8 @@ jobs: - packages runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: actions/checkout@v4 + - uses: actions/download-artifact@v4 with: name: packages path: dist diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 363dec4a6..4638ab5d3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,7 +1,16 @@ name: test # The name must be the same as in test-docs.yml on: - workflow_call: ~ + workflow_call: + inputs: + full-matrix: + description: "Run the full matrix" + required: true + type: boolean + ref: + description: "The git ref of elastic/apm-agent-python to run test workflow from." + required: false + type: string pull_request: paths-ignore: - "**/*.md" @@ -14,6 +23,12 @@ on: - "**/*.asciidoc" schedule: - cron: "0 2 * * *" + workflow_dispatch: + inputs: + full-matrix: + description: "Run the full matrix" + required: true + type: boolean jobs: build-distribution: @@ -26,14 +41,16 @@ jobs: data: ${{ steps.split.outputs.data }} chunks: ${{ steps.split.outputs.chunks }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || github.ref }} - id: generate uses: elastic/apm-pipeline-library/.github/actions/version-framework@current with: # Use .ci/.matrix_python_full.yml if it's a scheduled workflow, otherwise use .ci/.matrix_python.yml - versionsFile: .ci/.matrix_python${{ github.event_name == 'schedule' && '_full' || '' }}.yml + versionsFile: .ci/.matrix_python${{ (github.event_name == 'schedule' || github.event_name == 'push' || inputs.full-matrix) && '_full' || '' }}.yml # Use .ci/.matrix_framework_full.yml if it's a scheduled workflow, otherwise use .ci/.matrix_framework.yml - frameworksFile: .ci/.matrix_framework${{ github.event_name == 'schedule' && '_full' || '' }}.yml + frameworksFile: .ci/.matrix_framework${{ (github.event_name == 'schedule' || github.event_name == 'push' || inputs.full-matrix) && '_full' || '' }}.yml excludedFile: .ci/.matrix_exclude.yml - name: Split matrix shell: python @@ -107,8 +124,10 @@ jobs: FRAMEWORK: ${{ matrix.framework }} ASYNCIO: ${{ matrix.asyncio }} steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || github.ref }} + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.version }} cache: pip @@ -142,7 +161,19 @@ jobs: - chunks-3 - windows steps: - - run: test $(echo '${{ toJSON(needs) }}' | jq -s 'map(.[].result) | all(.=="success")') = 'true' + - id: check + uses: elastic/apm-pipeline-library/.github/actions/check-dependent-jobs@current + with: + needs: ${{ toJSON(needs) }} + - run: ${{ steps.check.outputs.isSuccess }} + - if: failure() && (github.event_name == 'schedule' || github.event_name == 'push') + uses: elastic/apm-pipeline-library/.github/actions/notify-build-status@current + with: + status: ${{ steps.check.outputs.status }} + vaultUrl: ${{ secrets.VAULT_ADDR }} + vaultRoleId: ${{ secrets.VAULT_ROLE_ID }} + vaultSecretId: ${{ secrets.VAULT_SECRET_ID }} + slackChannel: "#apm-agent-python" coverage: name: Combine & check coverage. 
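With the test.yml changes above, the full version-by-framework matrix is selected not only for scheduled runs but also for pushes and whenever `full-matrix: true` is passed (release builds and the `/test matrix` command), with exclusions still coming from `.ci/.matrix_exclude.yml`. The sketch below approximates what the matrix generation amounts to; it assumes, purely for illustration, that the version and framework files each hold a flat YAML list, whereas the exclude format is taken from the file shown at the top of this diff.

```python
# Local approximation of the generate-matrix job (illustrative assumptions noted above).
import itertools

import yaml


def select_matrix_files(event_name: str, full_matrix: bool) -> tuple:
    # Mirrors the workflow expression: schedule, push, or an explicit
    # full-matrix input switches to the *_full variants of the matrix files.
    suffix = "_full" if full_matrix or event_name in ("schedule", "push") else ""
    return f".ci/.matrix_python{suffix}.yml", f".ci/.matrix_framework{suffix}.yml"


def expand_matrix(versions: list, frameworks: list, exclude_file: str) -> list:
    # Cross every version with every framework, then drop the excluded pairs.
    with open(exclude_file) as fh:
        excluded = {(e["VERSION"], e["FRAMEWORK"]) for e in yaml.safe_load(fh)["exclude"]}
    return [
        {"version": v, "framework": f}
        for v, f in itertools.product(versions, frameworks)
        if (v, f) not in excluded
    ]
```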
@@ -150,9 +181,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || github.ref }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: # Use latest Python, so it understands all syntax. python-version: 3.11 @@ -175,10 +208,10 @@ jobs: python -Im coverage report --fail-under=84 - name: Upload HTML report - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: html-coverage-report path: htmlcov - - uses: geekyeggo/delete-artifact@54ab544f12cdb7b71613a16a2b5a37a9ade990af + - uses: geekyeggo/delete-artifact@65041433121f7239077fa20be14c0690f70569de with: name: coverage-reports diff --git a/.github/workflows/updatecli.yml b/.github/workflows/updatecli.yml index 2101ec798..4fc00bd71 100644 --- a/.github/workflows/updatecli.yml +++ b/.github/workflows/updatecli.yml @@ -12,13 +12,14 @@ jobs: bump: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: elastic/apm-pipeline-library/.github/actions/updatecli@current with: vaultUrl: ${{ secrets.VAULT_ADDR }} vaultRoleId: ${{ secrets.VAULT_ROLE_ID }} vaultSecretId: ${{ secrets.VAULT_SECRET_ID }} - pipeline: .ci/updatecli.d + pipeline: .ci/updatecli/updatecli.d + values: .ci/updatecli/values.yml - if: failure() uses: elastic/apm-pipeline-library/.github/actions/notify-build-status@current with: diff --git a/.hound.yml b/.hound.yml deleted file mode 100644 index 0745a960a..000000000 --- a/.hound.yml +++ /dev/null @@ -1,5 +0,0 @@ -flake8: - enabled: true - config_file: .flake8 - -fail_on_violations: true diff --git a/CHANGELOG.asciidoc b/CHANGELOG.asciidoc index fb87b1107..57a6cb95f 100644 --- a/CHANGELOG.asciidoc +++ b/CHANGELOG.asciidoc @@ -5,7 +5,7 @@ endif::[] //// [[release-notes-x.x.x]] -==== x.x.x - YYYY/MM/DD +==== x.x.x - YYYY-MM-DD [float] ===== Breaking changes @@ -32,6 +32,44 @@ endif::[] [[release-notes-6.x]] === Python Agent version 6.x +[[release-notes-6.21.3]] +==== 6.21.3 - 2024-03-08 + +[float] +===== Bug fixes + +* Fix artifacts download in CI workflows {pull}1996[#1996] + +[[release-notes-6.21.2]] +==== 6.21.2 - 2024-03-07 + +[float] +===== Bug fixes + +* Fix artifacts upload in CI build-distribution workflow {pull}1993[#1993] + +[[release-notes-6.21.1]] +==== 6.21.1 - 2024-03-07 + +[float] +===== Bug fixes + +* Fix CI release workflow {pull}1990[#1990] + +[[release-notes-6.21.0]] +==== 6.21.0 - 2024-03-06 + +[float] +===== Bug fixes + +* Fix starlette middleware setup without client argument {pull}1952[#1952] +* Fix blocking of gRPC stream-to-stream requests {pull}1967[#1967] +* Always take into account body reading time for starlette requests {pull}1970[#1970] +* Make urllib3 transport tests more robust against local env {pull}1969[#1969] +* Clarify starlette integration documentation {pull}1956[#1956] +* Make dbapi2 query scanning for dollar quotes a bit more correct {pull}1976[#1976] +* Normalize headers in AWS Lambda integration on API Gateway v1 requests {pull}1982[#1982] + [[release-notes-6.20.0]] ==== 6.20.0 - 2024-01-10 diff --git a/dev-utils/requirements.txt b/dev-utils/requirements.txt index 21258b2ad..59008afc2 100644 --- a/dev-utils/requirements.txt +++ b/dev-utils/requirements.txt @@ -1,4 +1,4 @@ # These are the pinned requirements for the lambda layer/docker image -certifi==2023.11.17 +certifi==2024.2.2 urllib3==1.26.18 wrapt==1.14.1 diff --git a/docs/serverless-lambda.asciidoc b/docs/serverless-lambda.asciidoc index 48c091390..732abb2b4 100644 --- 
a/docs/serverless-lambda.asciidoc +++ b/docs/serverless-lambda.asciidoc @@ -5,6 +5,11 @@ The Python APM Agent can be used with AWS Lambda to monitor the execution of your AWS Lambda functions. +``` +Note: The Centralized Agent Configuration on the Elasticsearch APM currently does NOT support AWS Lambda. +``` + + [float] ==== Prerequisites diff --git a/docs/starlette.asciidoc b/docs/starlette.asciidoc index 77aaca0d4..941bf6d7a 100644 --- a/docs/starlette.asciidoc +++ b/docs/starlette.asciidoc @@ -42,10 +42,12 @@ app = Starlette() app.add_middleware(ElasticAPM) ---- -WARNING: If you are using any `BaseHTTPMiddleware` middleware, you must add them -*before* the ElasticAPM middleware. This is because `BaseHTTPMiddleware` breaks -`contextvar` propagation, as noted -https://www.starlette.io/middleware/#limitations[here]. +WARNING: `BaseHTTPMiddleware` breaks `contextvar` propagation, as noted +https://www.starlette.io/middleware/#limitations[here]. This means the +ElasticAPM middleware must be above any `BaseHTTPMiddleware` in the final +middleware list. If you're calling `add_middleware` repeatedly, add the +ElasticAPM middleware last. If you're passing in a list of middleware, +ElasticAPM should be first on that list. To configure the agent using initialization arguments: diff --git a/elasticapm/contrib/grpc/async_server_interceptor.py b/elasticapm/contrib/grpc/async_server_interceptor.py index 5af0c1372..e7c9b659f 100644 --- a/elasticapm/contrib/grpc/async_server_interceptor.py +++ b/elasticapm/contrib/grpc/async_server_interceptor.py @@ -33,20 +33,18 @@ import grpc import elasticapm -from elasticapm.contrib.grpc.server_interceptor import _ServicerContextWrapper, _wrap_rpc_behavior, get_trace_parent +from elasticapm.contrib.grpc.server_interceptor import _ServicerContextWrapper, get_trace_parent class _AsyncServerInterceptor(grpc.aio.ServerInterceptor): async def intercept_service(self, continuation, handler_call_details): - def transaction_wrapper(behavior, request_streaming, response_streaming): - async def _interceptor(request_or_iterator, context): - if request_streaming or response_streaming: # only unary-unary is supported - return behavior(request_or_iterator, context) + def wrap_unary_unary(behavior): + async def _interceptor(request, context): tp = get_trace_parent(handler_call_details) client = elasticapm.get_client() transaction = client.begin_transaction("request", trace_parent=tp) try: - result = behavior(request_or_iterator, _ServicerContextWrapper(context, transaction)) + result = behavior(request, _ServicerContextWrapper(context, transaction)) # This is so we can support both sync and async rpc functions if inspect.isawaitable(result): @@ -65,4 +63,12 @@ async def _interceptor(request_or_iterator, context): return _interceptor - return _wrap_rpc_behavior(await continuation(handler_call_details), transaction_wrapper) + handler = await continuation(handler_call_details) + if handler.request_streaming or handler.response_streaming: + return handler + + return grpc.unary_unary_rpc_method_handler( + wrap_unary_unary(handler.unary_unary), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) diff --git a/elasticapm/contrib/serverless/aws.py b/elasticapm/contrib/serverless/aws.py index 26f37bdfb..9f5f7b133 100644 --- a/elasticapm/contrib/serverless/aws.py +++ b/elasticapm/contrib/serverless/aws.py @@ -135,6 +135,18 @@ def prep_kwargs(kwargs=None): return kwargs +def should_normalize_headers(event: dict) -> bool: + """ + Helper to 
decide if we should normalize headers or not depending on the event + + Even if the documentation says that headers are lowercased it's not always the case for format version 1.0 + https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + """ + + request_context = event.get("requestContext", {}) + return ("elb" in request_context or "requestId" in request_context) and "http" not in request_context + + class _lambda_transaction(object): """ Context manager for creating transactions around AWS Lambda functions. @@ -162,7 +174,13 @@ def __enter__(self): # service like Step Functions, and is unlikely to be standardized # in any way. We just have to rely on our defaults in this case. self.event = {} - trace_parent = TraceParent.from_headers(self.event.get("headers") or {}) + + headers = self.event.get("headers") or {} + if headers and should_normalize_headers(self.event): + normalized_headers = {k.lower(): v for k, v in headers.items()} + else: + normalized_headers = headers + trace_parent = TraceParent.from_headers(normalized_headers) global COLD_START cold_start = COLD_START diff --git a/elasticapm/contrib/starlette/__init__.py b/elasticapm/contrib/starlette/__init__.py index a6262ba86..ad26d7a0a 100644 --- a/elasticapm/contrib/starlette/__init__.py +++ b/elasticapm/contrib/starlette/__init__.py @@ -36,6 +36,7 @@ from typing import Dict, Optional import starlette +from starlette.datastructures import Headers from starlette.requests import Request from starlette.routing import Match, Mount from starlette.types import ASGIApp, Message @@ -105,7 +106,7 @@ class ElasticAPM: >>> elasticapm.capture_message('hello, world!') """ - def __init__(self, app: ASGIApp, client: Optional[Client], **kwargs) -> None: + def __init__(self, app: ASGIApp, client: Optional[Client] = None, **kwargs) -> None: """ Args: @@ -151,6 +152,10 @@ async def wrapped_send(message) -> None: _mocked_receive = None _request_receive = None + # begin the transaction before capturing the body to get that time accounted + trace_parent = TraceParent.from_headers(dict(Headers(scope=scope))) + self.client.begin_transaction("request", trace_parent=trace_parent) + if self.client.config.capture_body != "off": # When we consume the body from receive, we replace the streaming @@ -234,9 +239,6 @@ async def _request_started(self, request: Request) -> None: if self.client.config.capture_body != "off": await get_body(request) - trace_parent = TraceParent.from_headers(dict(request.headers)) - self.client.begin_transaction("request", trace_parent=trace_parent) - await set_context(lambda: get_data_from_request(request, self.client.config, constants.TRANSACTION), "request") transaction_name = self.get_route_name(request) or request.url.path elasticapm.set_transaction_name("{} {}".format(request.method, transaction_name), override=False) diff --git a/elasticapm/instrumentation/packages/dbapi2.py b/elasticapm/instrumentation/packages/dbapi2.py index fb49723c2..fa1d0f31e 100644 --- a/elasticapm/instrumentation/packages/dbapi2.py +++ b/elasticapm/instrumentation/packages/dbapi2.py @@ -34,6 +34,7 @@ """ import re +import string import wrapt @@ -85,6 +86,7 @@ def scan(tokens): literal_started = None prev_was_escape = False lexeme = [] + digits = set(string.digits) i = 0 while i < len(tokens): @@ -114,6 +116,11 @@ def scan(tokens): literal_start_idx = i literal_started = token elif token == "$": + # exclude query parameters that have a digit following the dollar + if True and len(tokens) > i + 1 and tokens[i + 
1] in digits: + yield i, token + i += 1 + continue # Postgres can use arbitrary characters between two $'s as a # literal separation token, e.g.: $fish$ literal $fish$ # This part will detect that and skip over the literal. diff --git a/elasticapm/instrumentation/packages/urllib.py b/elasticapm/instrumentation/packages/urllib.py index b40932a55..2b0dae16e 100644 --- a/elasticapm/instrumentation/packages/urllib.py +++ b/elasticapm/instrumentation/packages/urllib.py @@ -97,10 +97,9 @@ def call(self, module, method, wrapped, instance, args, kwargs): leaf_span.dist_tracing_propagated = True response = wrapped(*args, **kwargs) if response: - status = getattr(response, "status", None) or response.getcode() # Python 2 compat if span.context: - span.context["http"]["status_code"] = status - span.set_success() if status < 400 else span.set_failure() + span.context["http"]["status_code"] = response.status + span.set_success() if response.status < 400 else span.set_failure() return response def mutate_unsampled_call_args(self, module, method, wrapped, instance, args, kwargs, transaction): diff --git a/elasticapm/utils/__init__.py b/elasticapm/utils/__init__.py index 58a302960..0f7b52c0d 100644 --- a/elasticapm/utils/__init__.py +++ b/elasticapm/utils/__init__.py @@ -33,20 +33,14 @@ import re import socket import urllib.parse -from functools import partial +from functools import partial, partialmethod from types import FunctionType from typing import Pattern from elasticapm.conf import constants from elasticapm.utils import encoding -try: - from functools import partialmethod - - partial_types = (partial, partialmethod) -except ImportError: - # Python 2 - partial_types = (partial,) +partial_types = (partial, partialmethod) default_ports = {"https": 443, "http": 80, "postgresql": 5432, "mysql": 3306, "mssql": 1433} diff --git a/elasticapm/version.py b/elasticapm/version.py index ea64e853b..6da6c370d 100644 --- a/elasticapm/version.py +++ b/elasticapm/version.py @@ -28,5 +28,5 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
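The dbapi2 change earlier in this diff stops treating Postgres positional parameters such as `$1` as the start of a dollar-quoted literal, which keeps signature extraction fast on queries with many `$n::type` placeholders (the new `test_extract_signature_pathological` below exercises exactly that shape). A small usage sketch, assuming the agent is importable locally; the expected signatures follow the behaviour demonstrated by the dbapi2 tests.

```python
from elasticapm.instrumentation.packages.dbapi2 import extract_signature

# Positional parameters ($1, $2, ...) are no longer mistaken for the opening
# tag of a dollar-quoted literal, so the scanner does not stall on them.
print(extract_signature("SELECT * FROM (VALUES ($1::varchar, $2::varchar))"))
# expected: "SELECT FROM"

# Genuine dollar quoting (e.g. $fish$ ... $fish$) is still skipped as an opaque literal.
print(extract_signature("INSERT INTO users (name) VALUES ($fish$O'Reilly$fish$)"))
# expected: "INSERT INTO users"
```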
-__version__ = (6, 20, 0) +__version__ = (6, 21, 3) VERSION = ".".join(map(str, __version__)) diff --git a/tests/client/client_tests.py b/tests/client/client_tests.py index 6cec88205..af266b710 100644 --- a/tests/client/client_tests.py +++ b/tests/client/client_tests.py @@ -77,11 +77,7 @@ def test_service_info_node_name(elasticapm_client): def test_process_info(elasticapm_client): process_info = elasticapm_client.get_process_info() assert process_info["pid"] == os.getpid() - if hasattr(os, "getppid"): - assert process_info["ppid"] == os.getppid() - else: - # Windows + Python 2.7 - assert process_info["ppid"] is None + assert process_info["ppid"] == os.getppid() assert "argv" not in process_info elasticapm_client.config.update("1", include_process_args=True) with mock.patch.object(sys, "argv", ["a", "b", "c"]): diff --git a/tests/contrib/asyncio/starlette_tests.py b/tests/contrib/asyncio/starlette_tests.py index 5f4c070bd..fcd7d0dee 100644 --- a/tests/contrib/asyncio/starlette_tests.py +++ b/tests/contrib/asyncio/starlette_tests.py @@ -534,3 +534,11 @@ def test_transaction_active_in_base_exception_handler(app, elasticapm_client): assert exc.transaction_id assert len(elasticapm_client.events[constants.TRANSACTION]) == 1 + + +def test_middleware_without_client_arg(): + with mock.patch.dict("os.environ", {"ELASTIC_APM_SERVICE_NAME": "foo"}): + app = Starlette() + elasticapm = ElasticAPM(app) + + assert elasticapm.client.config.service_name == "foo" diff --git a/tests/contrib/serverless/aws_elb_test_data.json b/tests/contrib/serverless/aws_elb_test_data.json index 87e05ac85..79b4dc6dd 100644 --- a/tests/contrib/serverless/aws_elb_test_data.json +++ b/tests/contrib/serverless/aws_elb_test_data.json @@ -15,6 +15,7 @@ "connection": "Keep-Alive", "host": "blabla.com", "user-agent": "Apache-HttpClient/4.5.13 (Java/11.0.15)", + "TraceParent": "00-12345678901234567890123456789012-1234567890123456-01", "x-amzn-trace-id": "Root=1-xxxxxxxxxxxxxx", "x-forwarded-for": "199.99.99.999", "x-forwarded-port": "443", diff --git a/tests/contrib/serverless/aws_tests.py b/tests/contrib/serverless/aws_tests.py index 9f4a7253f..df062a378 100644 --- a/tests/contrib/serverless/aws_tests.py +++ b/tests/contrib/serverless/aws_tests.py @@ -36,7 +36,12 @@ from elasticapm import capture_span from elasticapm.conf import constants -from elasticapm.contrib.serverless.aws import capture_serverless, get_data_from_request, get_data_from_response +from elasticapm.contrib.serverless.aws import ( + capture_serverless, + get_data_from_request, + get_data_from_response, + should_normalize_headers, +) @pytest.fixture @@ -300,6 +305,7 @@ def test_func(event, context): assert transaction["context"]["request"]["headers"] assert transaction["context"]["response"]["status_code"] == 200 assert transaction["context"]["service"]["origin"]["name"] == "lambda-279XGJDqGZ5rsrHC2Fjr" + assert transaction["trace_id"] == "12345678901234567890123456789012" def test_capture_serverless_s3(event_s3, context, elasticapm_client): @@ -477,3 +483,17 @@ def test_func(event, context): test_func(event_api2, context) assert len(elasticapm_client.events[constants.TRANSACTION]) == 1 + + +def test_should_normalize_headers_true(event_api, event_elb): + assert should_normalize_headers(event_api) is True + assert should_normalize_headers(event_elb) is True + + +def test_should_normalize_headers_false(event_api2, event_lurl, event_s3, event_s3_batch, event_sqs, event_sns): + assert should_normalize_headers(event_api2) is False + assert 
should_normalize_headers(event_lurl) is False + assert should_normalize_headers(event_s3) is False + assert should_normalize_headers(event_s3_batch) is False + assert should_normalize_headers(event_sqs) is False + assert should_normalize_headers(event_sns) is False diff --git a/tests/fixtures.py b/tests/fixtures.py index 94e89f961..ddeaa1f5b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -46,6 +46,7 @@ import zlib from collections import defaultdict from typing import Optional +from urllib.request import pathname2url import jsonschema import mock @@ -61,12 +62,6 @@ from elasticapm.transport.http_base import HTTPTransportBase from elasticapm.utils.threading import ThreadManager -try: - from urllib.request import pathname2url -except ImportError: - # Python 2 - from urllib import pathname2url - cur_dir = os.path.dirname(os.path.realpath(__file__)) ERRORS_SCHEMA = os.path.join(cur_dir, "upstream", "json-specs", "error.json") diff --git a/tests/instrumentation/dbapi2_tests.py b/tests/instrumentation/dbapi2_tests.py index 3d72b6632..089571715 100644 --- a/tests/instrumentation/dbapi2_tests.py +++ b/tests/instrumentation/dbapi2_tests.py @@ -122,6 +122,20 @@ def test_extract_signature_bytes(): assert actual == expected +def test_extract_signature_pathological(): + # tune for performance testing + multiplier = 10 + values = [] + for chunk in range(multiplier): + i = chunk * 3 + values.append(f" (${1+i}::varchar, ${2+i}::varchar, ${3+i}::varchar), ") + + sql = f"SELECT * FROM (VALUES {''.join(values)})\n" + actual = extract_signature(sql) + expected = "SELECT FROM" + assert actual == expected + + @pytest.mark.parametrize( ["sql", "expected"], [ diff --git a/tests/instrumentation/urllib_tests.py b/tests/instrumentation/urllib_tests.py index 3f2796483..fbf5fa44f 100644 --- a/tests/instrumentation/urllib_tests.py +++ b/tests/instrumentation/urllib_tests.py @@ -114,7 +114,7 @@ def test_urllib_error(instrument, elasticapm_client, waiting_httpserver, status_ @mock.patch(request_method) @mock.patch(getresponse_method) def test_urllib_standard_port(mock_getresponse, mock_request, instrument, elasticapm_client): - # "code" is needed for Python 3, "status" for Python 2 + # Python internally used both "code" and "status" mock_getresponse.return_value = mock.Mock(code=200, status=200) url = "http://example.com/" diff --git a/tests/transports/test_urllib3.py b/tests/transports/test_urllib3.py index 42a21c1e9..b24408e54 100644 --- a/tests/transports/test_urllib3.py +++ b/tests/transports/test_urllib3.py @@ -115,38 +115,46 @@ def test_generic_error(mock_urlopen, elasticapm_client): def test_http_proxy_environment_variable(elasticapm_client): - with mock.patch.dict("os.environ", {"HTTP_PROXY": "http://example.com"}): + with mock.patch.dict("os.environ", {"HTTP_PROXY": "http://example.com"}, clear=True): transport = Transport("http://localhost:9999", client=elasticapm_client) assert isinstance(transport.http, urllib3.ProxyManager) def test_https_proxy_environment_variable(elasticapm_client): - with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com"}): + with mock.patch.dict( + "os.environ", + { + "HTTPS_PROXY": "https://example.com", + }, + clear=True, + ): transport = Transport("http://localhost:9999", client=elasticapm_client) assert isinstance(transport.http, urllib3.poolmanager.ProxyManager) def test_https_proxy_environment_variable_is_preferred(elasticapm_client): - with mock.patch.dict("os.environ", {"https_proxy": "https://example.com", "HTTP_PROXY": "http://example.com"}): + with 
mock.patch.dict( + "os.environ", {"https_proxy": "https://example.com", "HTTP_PROXY": "http://example.com"}, clear=True + ): transport = Transport("http://localhost:9999", client=elasticapm_client) assert isinstance(transport.http, urllib3.poolmanager.ProxyManager) assert transport.http.proxy.scheme == "https" def test_no_proxy_star(elasticapm_client): - with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com", "NO_PROXY": "*"}): + with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com", "NO_PROXY": "*"}, clear=True): transport = Transport("http://localhost:9999", client=elasticapm_client) assert not isinstance(transport.http, urllib3.poolmanager.ProxyManager) def test_no_proxy_host(elasticapm_client): - with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com", "NO_PROXY": "localhost"}): + with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com", "NO_PROXY": "localhost"}, clear=True): transport = Transport("http://localhost:9999", client=elasticapm_client) assert not isinstance(transport.http, urllib3.poolmanager.ProxyManager) def test_no_proxy_all(elasticapm_client): - with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com", "NO_PROXY": "*"}): + with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com", "NO_PROXY": "*"}, clear=True): transport = Transport("http://localhost:9999", client=elasticapm_client) assert not isinstance(transport.http, urllib3.poolmanager.ProxyManager) diff --git a/tests/utils/tests.py b/tests/utils/tests.py index 5f073862d..bd09eef26 100644 --- a/tests/utils/tests.py +++ b/tests/utils/tests.py @@ -30,7 +30,7 @@ import os import socket -from functools import partial +from functools import partial, partialmethod import pytest @@ -48,12 +48,6 @@ ) from elasticapm.utils.deprecation import deprecated -try: - from functools import partialmethod -except ImportError: - # Python 2 - partialmethod = None - @deprecated("alternative") def deprecated_function(): @@ -164,7 +158,6 @@ def x(x): assert "partial(tests.utils.tests.x)" == get_name_from_func(p) -@pytest.mark.skipif(partialmethod is None, reason="partialmethod not available on Python 2") def test_get_name_from_func_partialmethod_unbound(): class X(object): def x(self, x): @@ -175,7 +168,6 @@ def x(self, x): assert "partial(tests.utils.tests.x)" == get_name_from_func(X.p) -@pytest.mark.skipif(partialmethod is None, reason="partialmethod not available on Python 2") def test_get_name_from_func_partialmethod_bound(): class X(object): def x(self, x):
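The urllib3 transport tests above now pass `clear=True` to `mock.patch.dict`, so proxy-related variables that happen to be set in a developer's or CI environment (an ambient `NO_PROXY`, for instance) can no longer leak into the assertions; this is the "more robust against local env" fix listed in the changelog. A minimal, standalone illustration of the difference:

```python
# Why clear=True matters: without it, pre-existing environment variables
# remain visible inside the patched block.
import os
from unittest import mock

os.environ["NO_PROXY"] = "*"  # pretend the host environment already sets this

with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com"}):
    assert os.environ.get("NO_PROXY") == "*"  # ambient value survives

with mock.patch.dict("os.environ", {"HTTPS_PROXY": "https://example.com"}, clear=True):
    assert "NO_PROXY" not in os.environ  # only the explicit variables are visible
    assert os.environ["HTTPS_PROXY"] == "https://example.com"
```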