diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml new file mode 100644 index 000000000..344cf686c --- /dev/null +++ b/.github/actions/setup-python-matrix/action.yml @@ -0,0 +1,45 @@ +name: "setup-python-matrix" +description: "Sets up all versions of python required for matrix testing in this repo." +runs: + using: "composite" + steps: + - uses: actions/setup-python@v3 + with: + python-version: "pypy-3.7" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "pypy-2.7" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.7" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.8" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.10" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "2.7" + architecture: x64 + + - name: Install Dependencies + shell: bash + run: | + python3.10 -m pip install -U pip + python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 diff --git a/.github/workflows/deploy-python.yml b/.github/workflows/deploy-python.yml index 4a0054c1a..e8fbd4f7f 100644 --- a/.github/workflows/deploy-python.yml +++ b/.github/workflows/deploy-python.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: persist-credentials: false fetch-depth: 0 @@ -57,7 +57,7 @@ jobs: uses: pypa/cibuildwheel@v2.1.3 env: CIBW_PLATFORM: linux - CIBW_BUILD: cp36-manylinux_aarch64 cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp310-manylinux_aarch64 cp36-manylinux_x86_64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 cp310-manylinux_x86_64 + CIBW_BUILD: cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp310-manylinux_aarch64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 cp310-manylinux_x86_64 CIBW_ARCHS: x86_64 aarch64 CIBW_ENVIRONMENT: "LD_LIBRARY_PATH=/opt/rh/devtoolset-8/root/usr/lib64:/opt/rh/devtoolset-8/root/usr/lib:/opt/rh/devtoolset-8/root/usr/lib64/dyninst:/opt/rh/devtoolset-8/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib" diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml index 75ab9a4b1..d378752dc 100644 --- a/.github/workflows/mega-linter.yml +++ b/.github/workflows/mega-linter.yml @@ -1,6 +1,6 @@ --- # Mega-Linter GitHub Action configuration file -# More info at https://megalinter.github.io +# More info at https://oxsecurity.github.io/megalinter name: Mega-Linter on: @@ -25,7 +25,7 @@ jobs: steps: # Git Checkout - name: Checkout Code - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} fetch-depth: 0 @@ -34,11 +34,11 @@ jobs: - name: Mega-Linter id: ml # You can override Mega-Linter flavor used to have faster performances - # More info at https://megalinter.github.io/flavors/ - uses: megalinter/megalinter/flavors/python@v5 + # More info at https://oxsecurity.github.io/megalinter/flavors/ + uses: oxsecurity/megalinter/flavors/python@v6 env: # All available variables are described in documentation - # https://megalinter.github.io/configuration/ + # https://oxsecurity.github.io/megalinter/configuration/ VALIDATE_ALL_CODEBASE: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} # Validates all source when push on main, else 
just the git diff with main. Set 'true' if you always want to lint all sources DEFAULT_BRANCH: ${{ github.event_name == 'pull_request' && github.base_ref || 'main' }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -51,7 +51,7 @@ jobs: with: name: Mega-Linter reports path: | - report + megalinter-reports mega-linter.log # Create pull request if applicable (for now works only on PR from same repository, not from forks) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b96a10186..892bfce9a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -29,18 +29,19 @@ jobs: runs-on: ubuntu-latest needs: - python + - elasticsearchserver01 + - elasticsearchserver07 + - gearman - grpc + - kafka - libcurl - - postgres + - memcached + - mongodb - mysql + - postgres + - rabbitmq - redis - solr - - memcached - - rabbitmq - - mongodb - - elasticsearchserver01 - - elasticsearchserver07 - - gearman steps: - name: Success @@ -51,6 +52,7 @@ jobs: TOTAL_GROUPS: 20 strategy: + fail-fast: false matrix: group-number: [ @@ -80,59 +82,8 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -153,6 +104,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -160,59 +112,8 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox 
virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -233,6 +134,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -240,59 +142,8 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix # Special case packages - name: Install libcurl-dev @@ -319,6 +170,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -341,59 +193,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -414,6 +215,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -439,59 +241,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: 
actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -512,6 +263,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -532,59 +284,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -605,6 +306,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -627,59 +329,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -700,6 +351,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -720,59 +372,8 @@ jobs: 
--health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -793,6 +394,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -814,59 +416,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -882,11 +433,77 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + kafka: + env: + TOTAL_GROUPS: 2 + + strategy: + fail-fast: false + matrix: + group-number: [1, 2] + + runs-on: ubuntu-latest + timeout-minutes: 30 + + services: + zookeeper: + image: bitnami/zookeeper:3.7 + env: + ALLOW_ANONYMOUS_LOGIN: yes + + ports: + - 2181:2181 + + kafka: + image: bitnami/kafka:3.2 + ports: + - 8080:8080 + - 8081:8081 + env: + ALLOW_PLAINTEXT_LISTENER: yes + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: true + KAFKA_CFG_LISTENERS: L1://:8080,L2://:8081 + KAFKA_CFG_ADVERTISED_LISTENERS: L1://127.0.0.1:8080,L2://kafka:8081, + KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: L1:PLAINTEXT,L2:PLAINTEXT + KAFKA_CFG_INTER_BROKER_LISTENER_NAME: L2 + + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix + + # Special case packages + - name: Install librdkafka-dev + run: | + # Use lsb-release to 
find the Ubuntu release codename so that the correct librdkafka package release is installed + sudo apt-get update + sudo ln -fs /usr/share/zoneinfo/America/Los_Angeles /etc/localtime + sudo apt-get install -y wget gnupg2 software-properties-common + sudo wget -qO - https://packages.confluent.io/deb/7.2/archive.key | sudo apt-key add - + sudo add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main" + sudo apt-get update + sudo apt-get install -y librdkafka-dev/$(lsb_release -c | cut -f 2) + + - name: Get Environments + id: get-envs + run: | + echo "::set-output name=envs::$(tox -l | grep "^${{ github.job }}\-" | ./.github/workflows/get-envs.py)" + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Test + run: | + tox -vv -e ${{ steps.get-envs.outputs.envs }} + env: + TOX_PARALLEL_NO_SPINNER: 1 + PY_COLORS: 0 + mongodb: env: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -907,59 +524,8 @@ --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id:
get-envs @@ -1075,6 +591,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -1097,59 +614,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -1170,6 +636,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -1189,59 +656,8 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs diff --git a/.gitignore b/.gitignore index dadd8da50..8226b0e97 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ # Linter -report/ +megalinter-reports/ # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.mega-linter.yml b/.mega-linter.yml index b947f9f4b..0f7205fcd 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -1,5 +1,5 @@ # Configuration file for Mega-Linter -# See all available variables at https://megalinter.github.io/configuration/ and in linters documentation +# See all available variables at https://oxsecurity.github.io/megalinter/configuration/ and in linters documentation APPLY_FIXES: none # all, none, or list of linter keys DEFAULT_BRANCH: main # Usually master or main @@ -29,7 +29,10 @@ PYTHON_FLAKE8_CONFIG_FILE: setup.cfg PYTHON_BLACK_CONFIG_FILE: 
pyproject.toml PYTHON_PYLINT_CONFIG_FILE: pyproject.toml PYTHON_ISORT_CONFIG_FILE: pyproject.toml +PYTHON_BANDIT_CONFIG_FILE: pyproject.toml +PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: "tests" +PYTHON_BANDIT_PRE_COMMANDS: + - command: "pip install bandit[toml]" + cwd: "workspace" -# Bandit's next release supports pyproject.toml. Until then, add config by cli option -PYTHON_BANDIT_ARGUMENTS: --skip=B110,B101,B404 PYTHON_PYLINT_ARGUMENTS: "--fail-under=0 --fail-on=E" diff --git a/newrelic/admin/record_deploy.py b/newrelic/admin/record_deploy.py index 3a6229de6..65748cc2a 100644 --- a/newrelic/admin/record_deploy.py +++ b/newrelic/admin/record_deploy.py @@ -39,7 +39,7 @@ def fetch_app_id(app_name, client, headers): return for application in response_json["applications"]: - if application["name"] == app_name: + if application["name"].lower() == app_name.lower(): return application["id"] diff --git a/newrelic/api/database_trace.py b/newrelic/api/database_trace.py index 2f58e9268..09dfa1e11 100644 --- a/newrelic/api/database_trace.py +++ b/newrelic/api/database_trace.py @@ -205,8 +205,9 @@ def finalize_data(self, transaction, exc=None, value=None, tb=None): execute_params = self.execute_params transaction._explain_plan_count += 1 - self.sql_format = tt.record_sql - + self.sql_format = ( + tt.record_sql if tt.record_sql else "" + ) # If tt.record_sql is None, then the empty string will default to sql being obfuscated self.connect_params = connect_params self.cursor_params = cursor_params self.sql_parameters = sql_parameters diff --git a/newrelic/api/message_trace.py b/newrelic/api/message_trace.py index a217ba6cc..be819d704 100644 --- a/newrelic/api/message_trace.py +++ b/newrelic/api/message_trace.py @@ -28,7 +28,7 @@ class MessageTrace(CatHeaderMixin, TimeTrace): cat_appdata_key = "NewRelicAppData" cat_synthetics_key = "NewRelicSynthetics" - def __init__(self, library, operation, destination_type, destination_name, params=None, **kwargs): + def __init__(self, library, operation, destination_type, destination_name, params=None, terminal=True, **kwargs): parent = kwargs.pop("parent", None) source = kwargs.pop("source", None) if kwargs: @@ -36,6 +36,8 @@ def __init__(self, library, operation, destination_type, destination_name, param super(MessageTrace, self).__init__(parent=parent, source=source) + self.terminal = terminal + self.library = library self.operation = operation @@ -69,7 +71,7 @@ def __repr__(self): ) def terminal_node(self): - return True + return self.terminal def create_node(self): return MessageNode( @@ -89,7 +91,7 @@ def create_node(self): ) -def MessageTraceWrapper(wrapped, library, operation, destination_type, destination_name, params={}): +def MessageTraceWrapper(wrapped, library, operation, destination_type, destination_name, params={}, terminal=True): def _nr_message_trace_wrapper_(wrapped, instance, args, kwargs): wrapper = async_wrapper(wrapped) if not wrapper: @@ -131,7 +133,7 @@ def _nr_message_trace_wrapper_(wrapped, instance, args, kwargs): else: _destination_name = destination_name - trace = MessageTrace(_library, _operation, _destination_type, _destination_name, params={}, parent=parent, source=wrapped) + trace = MessageTrace(_library, _operation, _destination_type, _destination_name, params={}, terminal=terminal, parent=parent, source=wrapped) if wrapper: # pylint: disable=W0125,W0126 return wrapper(wrapped, trace)(*args, **kwargs) @@ -142,7 +144,7 @@ def _nr_message_trace_wrapper_(wrapped, instance, args, kwargs): return FunctionWrapper(wrapped, 
_nr_message_trace_wrapper_) -def message_trace(library, operation, destination_type, destination_name, params={}): +def message_trace(library, operation, destination_type, destination_name, params={}, terminal=True): return functools.partial( MessageTraceWrapper, library=library, @@ -150,10 +152,11 @@ def message_trace(library, operation, destination_type, destination_name, params destination_type=destination_type, destination_name=destination_name, params=params, + terminal=terminal, ) -def wrap_message_trace(module, object_path, library, operation, destination_type, destination_name, params={}): +def wrap_message_trace(module, object_path, library, operation, destination_type, destination_name, params={}, terminal=True): wrap_object( - module, object_path, MessageTraceWrapper, (library, operation, destination_type, destination_name, params) + module, object_path, MessageTraceWrapper, (library, operation, destination_type, destination_name, params, terminal) ) diff --git a/newrelic/api/message_transaction.py b/newrelic/api/message_transaction.py index 1ce0025e9..291a3897e 100644 --- a/newrelic/api/message_transaction.py +++ b/newrelic/api/message_transaction.py @@ -19,33 +19,39 @@ from newrelic.api.background_task import BackgroundTask from newrelic.api.message_trace import MessageTrace from newrelic.api.transaction import current_transaction +from newrelic.common.async_proxy import TransactionContext, async_proxy from newrelic.common.object_wrapper import FunctionWrapper, wrap_object -from newrelic.common.async_proxy import async_proxy, TransactionContext class MessageTransaction(BackgroundTask): - - def __init__(self, library, destination_type, - destination_name, application, routing_key=None, - exchange_type=None, headers=None, queue_name=None, reply_to=None, - correlation_id=None, source=None): - - name, group = self.get_transaction_name(library, destination_type, - destination_name) - - super(MessageTransaction, self).__init__(application, name, - group=group, source=source) + def __init__( + self, + library, + destination_type, + destination_name, + application, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, + transport_type="AMQP", + source=None, + ): + + name, group = self.get_transaction_name(library, destination_type, destination_name) + + super(MessageTransaction, self).__init__(application, name, group=group, source=source) self.headers = headers if headers is not None and self.settings is not None: if self.settings.distributed_tracing.enabled: - self.accept_distributed_trace_headers( - headers, transport_type='AMQP') + self.accept_distributed_trace_headers(headers, transport_type=transport_type) elif self.settings.cross_application_tracer.enabled: self._process_incoming_cat_headers( - headers.pop(MessageTrace.cat_id_key, None), - headers.pop(MessageTrace.cat_transaction_key, None) + headers.pop(MessageTrace.cat_id_key, None), headers.pop(MessageTrace.cat_transaction_key, None) ) self.routing_key = routing_key @@ -56,37 +62,45 @@ def __init__(self, library, destination_type, @staticmethod def get_transaction_name(library, destination_type, destination_name): - group = 'Message/%s/%s' % (library, destination_type) - name = 'Named/%s' % destination_name + group = "Message/%s/%s" % (library, destination_type) + name = "Named/%s" % destination_name return name, group def _update_agent_attributes(self): ms_attrs = self._agent_attributes if self.exchange_type is not None: - ms_attrs['message.exchangeType'] = 
self.exchange_type + ms_attrs["message.exchangeType"] = self.exchange_type if self.queue_name is not None: - ms_attrs['message.queueName'] = self.queue_name + ms_attrs["message.queueName"] = self.queue_name if self.reply_to is not None: - ms_attrs['message.replyTo'] = self.reply_to + ms_attrs["message.replyTo"] = self.reply_to if self.correlation_id is not None: - ms_attrs['message.correlationId'] = self.correlation_id + ms_attrs["message.correlationId"] = self.correlation_id if self.headers: for k, v in self.headers.items(): - new_key = 'message.headers.%s' % k + new_key = "message.headers.%s" % k new_val = str(v) ms_attrs[new_key] = new_val if self.routing_key is not None: - ms_attrs['message.routingKey'] = self.routing_key + ms_attrs["message.routingKey"] = self.routing_key super(MessageTransaction, self)._update_agent_attributes() -def MessageTransactionWrapper(wrapped, library, destination_type, - destination_name, application=None, routing_key=None, - exchange_type=None, headers=None, queue_name=None, reply_to=None, - correlation_id=None): - +def MessageTransactionWrapper( + wrapped, + library, + destination_type, + destination_name, + application=None, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, +): def wrapper(wrapped, instance, args, kwargs): if callable(library): if instance is not None: @@ -173,9 +187,8 @@ def create_transaction(transaction): if not transaction.background_task: transaction.background_task = True transaction.set_transaction_name( - *MessageTransaction.get_transaction_name( - _library, _destination_type, - _destination_name)) + *MessageTransaction.get_transaction_name(_library, _destination_type, _destination_name) + ) return None @@ -233,22 +246,61 @@ def create_transaction(transaction): return FunctionWrapper(wrapped, wrapper) -def message_transaction(library, destination_type, destination_name, - application=None, routing_key=None, exchange_type=None, headers=None, - queue_name=None, reply_to=None, correlation_id=None): - return functools.partial(MessageTransactionWrapper, - library=library, destination_type=destination_type, - destination_name=destination_name, application=application, - routing_key=routing_key, exchange_type=exchange_type, - headers=headers, queue_name=queue_name, reply_to=reply_to, - correlation_id=correlation_id) - - -def wrap_message_transaction(module, object_path, library, destination_type, - destination_name, application=None, routing_key=None, - exchange_type=None, headers=None, queue_name=None, reply_to=None, - correlation_id=None): - wrap_object(module, object_path, MessageTransactionWrapper, - (library, destination_type, destination_name, application, - routing_key, exchange_type, headers, queue_name, reply_to, - correlation_id)) +def message_transaction( + library, + destination_type, + destination_name, + application=None, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, +): + return functools.partial( + MessageTransactionWrapper, + library=library, + destination_type=destination_type, + destination_name=destination_name, + application=application, + routing_key=routing_key, + exchange_type=exchange_type, + headers=headers, + queue_name=queue_name, + reply_to=reply_to, + correlation_id=correlation_id, + ) + + +def wrap_message_transaction( + module, + object_path, + library, + destination_type, + destination_name, + application=None, + routing_key=None, + exchange_type=None, + headers=None, + 
queue_name=None, + reply_to=None, + correlation_id=None, +): + wrap_object( + module, + object_path, + MessageTransactionWrapper, + ( + library, + destination_type, + destination_name, + application, + routing_key, + exchange_type, + headers, + queue_name, + reply_to, + correlation_id, + ), + ) diff --git a/newrelic/common/agent_http.py b/newrelic/common/agent_http.py index 239892000..e9d9a00aa 100644 --- a/newrelic/common/agent_http.py +++ b/newrelic/common/agent_http.py @@ -524,24 +524,32 @@ def _supportability_request(params, payload, body, compression_time): # ********* # Used only for supportability metrics. Do not use to drive business # logic! + # payload: uncompressed + # body: compressed agent_method = params and params.get("method") # ********* - if agent_method and body: + if agent_method and payload: # Compression was applied if compression_time is not None: internal_metric( - "Supportability/Python/Collector/ZLIB/Bytes/%s" % agent_method, - len(payload), + "Supportability/Python/Collector/%s/ZLIB/Bytes" % agent_method, + len(body), ) internal_metric( - "Supportability/Python/Collector/ZLIB/Compress/%s" % agent_method, + "Supportability/Python/Collector/ZLIB/Bytes", len(body) + ) + internal_metric( + "Supportability/Python/Collector/%s/ZLIB/Compress" % agent_method, compression_time, ) - internal_metric( - "Supportability/Python/Collector/Output/Bytes/%s" % agent_method, - len(body), + "Supportability/Python/Collector/%s/Output/Bytes" % agent_method, + len(payload), + ) + # Top level metric to aggregate overall bytes being sent + internal_metric( + "Supportability/Python/Collector/Output/Bytes", len(payload) ) @staticmethod diff --git a/newrelic/config.py b/newrelic/config.py index 1c3571a54..4e0912db8 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2307,6 +2307,43 @@ def _process_module_builtin_defaults(): "instrument_cherrypy__cptree", ) + _process_module_definition( + "confluent_kafka.cimpl", + "newrelic.hooks.messagebroker_confluentkafka", + "instrument_confluentkafka_cimpl", + ) + _process_module_definition( + "confluent_kafka.serializing_producer", + "newrelic.hooks.messagebroker_confluentkafka", + "instrument_confluentkafka_serializing_producer", + ) + _process_module_definition( + "confluent_kafka.deserializing_consumer", + "newrelic.hooks.messagebroker_confluentkafka", + "instrument_confluentkafka_deserializing_consumer", + ) + + _process_module_definition( + "kafka.consumer.group", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_consumer_group", + ) + _process_module_definition( + "kafka.producer.kafka", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_producer", + ) + _process_module_definition( + "kafka.coordinator.heartbeat", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_heartbeat", + ) + _process_module_definition( "logging", "newrelic.hooks.logger_logging", @@ -2540,8 +2577,21 @@ def _process_module_builtin_defaults(): _process_module_definition("uvicorn.config", "newrelic.hooks.adapter_uvicorn", "instrument_uvicorn_config") + _process_module_definition( + "hypercorn.asyncio.run", "newrelic.hooks.adapter_hypercorn", "instrument_hypercorn_asyncio_run" + ) + _process_module_definition( + "hypercorn.trio.run", "newrelic.hooks.adapter_hypercorn", "instrument_hypercorn_trio_run" + ) + _process_module_definition("hypercorn.utils",
"newrelic.hooks.adapter_hypercorn", "instrument_hypercorn_utils") + + _process_module_definition("daphne.server", "newrelic.hooks.adapter_daphne", "instrument_daphne_server") + _process_module_definition("sanic.app", "newrelic.hooks.framework_sanic", "instrument_sanic_app") _process_module_definition("sanic.response", "newrelic.hooks.framework_sanic", "instrument_sanic_response") + _process_module_definition( + "sanic.touchup.service", "newrelic.hooks.framework_sanic", "instrument_sanic_touchup_service" + ) _process_module_definition("aiohttp.wsgi", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_wsgi") _process_module_definition("aiohttp.web", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_web") @@ -2712,7 +2762,9 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "redis.commands.timeseries.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_timeseries_commands" + "redis.commands.timeseries.commands", + "newrelic.hooks.datastore_redis", + "instrument_redis_commands_timeseries_commands", ) _process_module_definition( diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 57f408b87..60520c113 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -812,7 +812,7 @@ def default_host(license_key): _settings.application_logging.enabled = _environ_as_bool("NEW_RELIC_APPLICATION_LOGGING_ENABLED", default=True) _settings.application_logging.forwarding.enabled = _environ_as_bool( - "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED", default=False + "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED", default=True ) _settings.application_logging.metrics.enabled = _environ_as_bool( "NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED", default=True diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py index f198155a2..9fc6e2dd4 100644 --- a/newrelic/core/environment.py +++ b/newrelic/core/environment.py @@ -20,6 +20,7 @@ import os import platform import sys +import sysconfig import newrelic from newrelic.common.system_info import ( @@ -28,11 +29,6 @@ total_physical_memory, ) -# try: -# import pkg_resources -# except ImportError: -# pass - try: import newrelic.core._thread_utilization except ImportError: @@ -42,6 +38,17 @@ def environment_settings(): """Returns an array of arrays of environment settings""" + # Find version resolver. + + get_version = None + # importlib was introduced into the standard library starting in Python3.8. + if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): + get_version = sys.modules["importlib"].metadata.version + elif "pkg_resources" in sys.modules: + + def get_version(name): # pylint: disable=function-redefined + return sys.modules["pkg_resources"].get_distribution(name).version + env = [] # Agent information. @@ -103,6 +110,8 @@ def environment_settings(): dispatcher = [] + # Find the first dispatcher module that's been loaded and report that as the dispatcher. + # If possible, also report the dispatcher's version and any other environment information. 
if not dispatcher and "mod_wsgi" in sys.modules: mod_wsgi = sys.modules["mod_wsgi"] if hasattr(mod_wsgi, "process_group"): @@ -169,6 +178,25 @@ def environment_settings(): if hasattr(uvicorn, "__version__"): dispatcher.append(("Dispatcher Version", uvicorn.__version__)) + if not dispatcher and "hypercorn" in sys.modules: + dispatcher.append(("Dispatcher", "hypercorn")) + hypercorn = sys.modules["hypercorn"] + + if hasattr(hypercorn, "__version__"): + dispatcher.append(("Dispatcher Version", hypercorn.__version__)) + else: + try: + dispatcher.append(("Dispatcher Version", get_version("hypercorn"))) + except Exception: + pass + + if not dispatcher and "daphne" in sys.modules: + dispatcher.append(("Dispatcher", "daphne")) + daphne = sys.modules["daphne"] + + if hasattr(daphne, "__version__"): + dispatcher.append(("Dispatcher Version", daphne.__version__)) + if not dispatcher and "tornado" in sys.modules: dispatcher.append(("Dispatcher", "tornado")) tornado = sys.modules["tornado"] @@ -178,6 +206,8 @@ env.extend(dispatcher) # Module information. + purelib = sysconfig.get_path("purelib") + platlib = sysconfig.get_path("platlib") plugins = [] @@ -187,31 +217,29 @@ # # TL;DR: Do NOT use an iterable on the original sys.modules to generate the # list for name, module in sys.modules.copy().items(): + # Exclude lib.sub_paths as independent modules except for newrelic.hooks. + if "." in name and not name.startswith("newrelic.hooks."): + continue # If the module isn't actually loaded (such as failed relative imports # in Python 2.7), the module will be None and should not be reported. - if module is None: + if not module: + continue + # Exclude standard library/built-in modules. + # Third-party modules can be installed in either purelib or platlib directories. + # See https://docs.python.org/3/library/sysconfig.html#installation-paths. + if ( + not hasattr(module, "__file__") + or not module.__file__ + or not module.__file__.startswith(purelib) + and not module.__file__.startswith(platlib) + ): continue - if name.startswith("newrelic.hooks."): - plugins.append(name) - - elif name.find(".") == -1 and hasattr(module, "__file__"): - # XXX This is disabled as it can cause notable overhead in - # pathalogical cases. Will be replaced with a new system - # where have a allowlist of packages we really want version - # information for and will work out on case by case basis - # how to extract that from the modules themselves. - - # try: - # if 'pkg_resources' in sys.modules: - # version = pkg_resources.get_distribution(name).version - # if version: - # name = '%s (%s)' % (name, version) - # except Exception: - # pass - + try: + version = get_version(name) + plugins.append("%s (%s)" % (name, version)) + except Exception: plugins.append(name) env.append(("Plugin List", plugins)) diff --git a/newrelic/core/infinite_tracing_pb2.py b/newrelic/core/infinite_tracing_pb2.py index 987c96303..a0fa9dc54 100644 --- a/newrelic/core/infinite_tracing_pb2.py +++ b/newrelic/core/infinite_tracing_pb2.py @@ -13,374 +13,13 @@ # limitations under the License.
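The hunk below reduces infinite_tracing_pb2.py to a thin version gate: it reads the installed protobuf version and re-exports one of two pre-generated modules (v4 on protobuf 4.x and later, v3 otherwise), insulating callers from the breaking change in protobuf 4's generated code. A hypothetical usage sketch follows; Span and AttributeValue come from the .proto definition embedded further down, while the field values are made up:

    from newrelic.core.infinite_tracing_pb2 import Span

    # The wildcard re-export keeps the public names stable no matter which
    # generated module (v3 or v4) was selected at import time.
    span = Span(trace_id="a1b2c3d4")
    span.intrinsics["category"].string_value = "generic"  # map<string, AttributeValue> entry
    print(span.trace_id)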
try: - from google.protobuf import descriptor as _descriptor - from google.protobuf import message as _message - from google.protobuf import reflection as _reflection - from google.protobuf import symbol_database as _symbol_database - # @@protoc_insertion_point(imports) -except ImportError: - pass + from google.protobuf import __version__ + PROTOBUF_VERSION = tuple(int(v) for v in __version__.split(".")) +except Exception: + PROTOBUF_VERSION = (0, 0, 0) + +# Import appropriate generated pb2 file for protobuf version +if PROTOBUF_VERSION >= (4,): + from newrelic.core.infinite_tracing_v4_pb2 import * else: - _sym_db = _symbol_database.Default() - - - DESCRIPTOR = _descriptor.FileDescriptor( - name='infinite_tracing.proto', - package='com.newrelic.trace.v1', - syntax='proto3', - serialized_options=None, - serialized_pb=b'\n\x16infinite_tracing.proto\x12\x15\x63om.newrelic.trace.v1\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\x65\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3' - ) - - - - - _SPAN_INTRINSICSENTRY = _descriptor.Descriptor( - name='IntrinsicsEntry', - full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=291, - serialized_end=379, - ) - - _SPAN_USERATTRIBUTESENTRY = _descriptor.Descriptor( - name='UserAttributesEntry', - full_name='com.newrelic.trace.v1.Span.UserAttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=381, - serialized_end=473, - ) - - _SPAN_AGENTATTRIBUTESENTRY = _descriptor.Descriptor( - name='AgentAttributesEntry', - full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=475, - serialized_end=568, - ) - - _SPAN = _descriptor.Descriptor( - name='Span', - full_name='com.newrelic.trace.v1.Span', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='trace_id', full_name='com.newrelic.trace.v1.Span.trace_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='intrinsics', full_name='com.newrelic.trace.v1.Span.intrinsics', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='user_attributes', full_name='com.newrelic.trace.v1.Span.user_attributes', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='agent_attributes', full_name='com.newrelic.trace.v1.Span.agent_attributes', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, 
default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_SPAN_INTRINSICSENTRY, _SPAN_USERATTRIBUTESENTRY, _SPAN_AGENTATTRIBUTESENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=50, - serialized_end=568, - ) - - - _ATTRIBUTEVALUE = _descriptor.Descriptor( - name='AttributeValue', - full_name='com.newrelic.trace.v1.AttributeValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='string_value', full_name='com.newrelic.trace.v1.AttributeValue.string_value', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bool_value', full_name='com.newrelic.trace.v1.AttributeValue.bool_value', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int_value', full_name='com.newrelic.trace.v1.AttributeValue.int_value', index=2, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='com.newrelic.trace.v1.AttributeValue.double_value', index=3, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='com.newrelic.trace.v1.AttributeValue.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=570, - serialized_end=686, - ) - - - _RECORDSTATUS = _descriptor.Descriptor( - name='RecordStatus', - full_name='com.newrelic.trace.v1.RecordStatus', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='messages_seen', full_name='com.newrelic.trace.v1.RecordStatus.messages_seen', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=688, - serialized_end=725, - ) - - _SPAN_INTRINSICSENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_INTRINSICSENTRY.containing_type = _SPAN - _SPAN_USERATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_USERATTRIBUTESENTRY.containing_type = _SPAN - 
_SPAN_AGENTATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_AGENTATTRIBUTESENTRY.containing_type = _SPAN - _SPAN.fields_by_name['intrinsics'].message_type = _SPAN_INTRINSICSENTRY - _SPAN.fields_by_name['user_attributes'].message_type = _SPAN_USERATTRIBUTESENTRY - _SPAN.fields_by_name['agent_attributes'].message_type = _SPAN_AGENTATTRIBUTESENTRY - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['string_value']) - _ATTRIBUTEVALUE.fields_by_name['string_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['bool_value']) - _ATTRIBUTEVALUE.fields_by_name['bool_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['int_value']) - _ATTRIBUTEVALUE.fields_by_name['int_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['double_value']) - _ATTRIBUTEVALUE.fields_by_name['double_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - DESCRIPTOR.message_types_by_name['Span'] = _SPAN - DESCRIPTOR.message_types_by_name['AttributeValue'] = _ATTRIBUTEVALUE - DESCRIPTOR.message_types_by_name['RecordStatus'] = _RECORDSTATUS - _sym_db.RegisterFileDescriptor(DESCRIPTOR) - - Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { - - 'IntrinsicsEntry' : _reflection.GeneratedProtocolMessageType('IntrinsicsEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_INTRINSICSENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) - }) - , - - 'UserAttributesEntry' : _reflection.GeneratedProtocolMessageType('UserAttributesEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_USERATTRIBUTESENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) - }) - , - - 'AgentAttributesEntry' : _reflection.GeneratedProtocolMessageType('AgentAttributesEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_AGENTATTRIBUTESENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) - }) - , - 'DESCRIPTOR' : _SPAN, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) - }) - _sym_db.RegisterMessage(Span) - _sym_db.RegisterMessage(Span.IntrinsicsEntry) - _sym_db.RegisterMessage(Span.UserAttributesEntry) - _sym_db.RegisterMessage(Span.AgentAttributesEntry) - - AttributeValue = _reflection.GeneratedProtocolMessageType('AttributeValue', (_message.Message,), { - 'DESCRIPTOR' : _ATTRIBUTEVALUE, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) - }) - _sym_db.RegisterMessage(AttributeValue) - - RecordStatus = _reflection.GeneratedProtocolMessageType('RecordStatus', (_message.Message,), { - 'DESCRIPTOR' : _RECORDSTATUS, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) - }) - _sym_db.RegisterMessage(RecordStatus) - - - _SPAN_INTRINSICSENTRY._options = None - _SPAN_USERATTRIBUTESENTRY._options = None - _SPAN_AGENTATTRIBUTESENTRY._options = None - - _INGESTSERVICE = _descriptor.ServiceDescriptor( - name='IngestService', - 
full_name='com.newrelic.trace.v1.IngestService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=727, - serialized_end=828, - methods=[ - _descriptor.MethodDescriptor( - name='RecordSpan', - full_name='com.newrelic.trace.v1.IngestService.RecordSpan', - index=0, - containing_service=None, - input_type=_SPAN, - output_type=_RECORDSTATUS, - serialized_options=None, - ), - ]) - _sym_db.RegisterServiceDescriptor(_INGESTSERVICE) - - DESCRIPTOR.services_by_name['IngestService'] = _INGESTSERVICE - - # @@protoc_insertion_point(module_scope) - + from newrelic.core.infinite_tracing_v3_pb2 import * diff --git a/newrelic/core/infinite_tracing_v3_pb2.py b/newrelic/core/infinite_tracing_v3_pb2.py new file mode 100644 index 000000000..987c96303 --- /dev/null +++ b/newrelic/core/infinite_tracing_v3_pb2.py @@ -0,0 +1,386 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + from google.protobuf import descriptor as _descriptor + from google.protobuf import message as _message + from google.protobuf import reflection as _reflection + from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) +except ImportError: + pass +else: + _sym_db = _symbol_database.Default() + + + DESCRIPTOR = _descriptor.FileDescriptor( + name='infinite_tracing.proto', + package='com.newrelic.trace.v1', + syntax='proto3', + serialized_options=None, + serialized_pb=b'\n\x16infinite_tracing.proto\x12\x15\x63om.newrelic.trace.v1\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\x65\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3' + ) + + + + + _SPAN_INTRINSICSENTRY = _descriptor.Descriptor( + name='IntrinsicsEntry', + full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=291, + serialized_end=379, + ) + + _SPAN_USERATTRIBUTESENTRY = _descriptor.Descriptor( + name='UserAttributesEntry', + full_name='com.newrelic.trace.v1.Span.UserAttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=381, + serialized_end=473, + ) + + _SPAN_AGENTATTRIBUTESENTRY = _descriptor.Descriptor( + name='AgentAttributesEntry', + full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=475, + serialized_end=568, + ) + + _SPAN = _descriptor.Descriptor( + name='Span', + full_name='com.newrelic.trace.v1.Span', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='trace_id', full_name='com.newrelic.trace.v1.Span.trace_id', index=0, + number=1, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='intrinsics', full_name='com.newrelic.trace.v1.Span.intrinsics', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='user_attributes', full_name='com.newrelic.trace.v1.Span.user_attributes', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='agent_attributes', full_name='com.newrelic.trace.v1.Span.agent_attributes', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_SPAN_INTRINSICSENTRY, _SPAN_USERATTRIBUTESENTRY, _SPAN_AGENTATTRIBUTESENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=50, + serialized_end=568, + ) + + + _ATTRIBUTEVALUE = _descriptor.Descriptor( + name='AttributeValue', + full_name='com.newrelic.trace.v1.AttributeValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='string_value', full_name='com.newrelic.trace.v1.AttributeValue.string_value', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bool_value', full_name='com.newrelic.trace.v1.AttributeValue.bool_value', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int_value', full_name='com.newrelic.trace.v1.AttributeValue.int_value', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='double_value', full_name='com.newrelic.trace.v1.AttributeValue.double_value', index=3, + number=4, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', full_name='com.newrelic.trace.v1.AttributeValue.value', + index=0, containing_type=None, 
fields=[]), + ], + serialized_start=570, + serialized_end=686, + ) + + + _RECORDSTATUS = _descriptor.Descriptor( + name='RecordStatus', + full_name='com.newrelic.trace.v1.RecordStatus', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='messages_seen', full_name='com.newrelic.trace.v1.RecordStatus.messages_seen', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=688, + serialized_end=725, + ) + + _SPAN_INTRINSICSENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE + _SPAN_INTRINSICSENTRY.containing_type = _SPAN + _SPAN_USERATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE + _SPAN_USERATTRIBUTESENTRY.containing_type = _SPAN + _SPAN_AGENTATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE + _SPAN_AGENTATTRIBUTESENTRY.containing_type = _SPAN + _SPAN.fields_by_name['intrinsics'].message_type = _SPAN_INTRINSICSENTRY + _SPAN.fields_by_name['user_attributes'].message_type = _SPAN_USERATTRIBUTESENTRY + _SPAN.fields_by_name['agent_attributes'].message_type = _SPAN_AGENTATTRIBUTESENTRY + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['string_value']) + _ATTRIBUTEVALUE.fields_by_name['string_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['bool_value']) + _ATTRIBUTEVALUE.fields_by_name['bool_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['int_value']) + _ATTRIBUTEVALUE.fields_by_name['int_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['double_value']) + _ATTRIBUTEVALUE.fields_by_name['double_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + DESCRIPTOR.message_types_by_name['Span'] = _SPAN + DESCRIPTOR.message_types_by_name['AttributeValue'] = _ATTRIBUTEVALUE + DESCRIPTOR.message_types_by_name['RecordStatus'] = _RECORDSTATUS + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + + Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { + + 'IntrinsicsEntry' : _reflection.GeneratedProtocolMessageType('IntrinsicsEntry', (_message.Message,), { + 'DESCRIPTOR' : _SPAN_INTRINSICSENTRY, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) + }) + , + + 'UserAttributesEntry' : _reflection.GeneratedProtocolMessageType('UserAttributesEntry', (_message.Message,), { + 'DESCRIPTOR' : _SPAN_USERATTRIBUTESENTRY, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) + }) + , + + 'AgentAttributesEntry' : _reflection.GeneratedProtocolMessageType('AgentAttributesEntry', (_message.Message,), { + 'DESCRIPTOR' : _SPAN_AGENTATTRIBUTESENTRY, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) + }) + , + 'DESCRIPTOR' : _SPAN, + 
'__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) + }) + _sym_db.RegisterMessage(Span) + _sym_db.RegisterMessage(Span.IntrinsicsEntry) + _sym_db.RegisterMessage(Span.UserAttributesEntry) + _sym_db.RegisterMessage(Span.AgentAttributesEntry) + + AttributeValue = _reflection.GeneratedProtocolMessageType('AttributeValue', (_message.Message,), { + 'DESCRIPTOR' : _ATTRIBUTEVALUE, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) + }) + _sym_db.RegisterMessage(AttributeValue) + + RecordStatus = _reflection.GeneratedProtocolMessageType('RecordStatus', (_message.Message,), { + 'DESCRIPTOR' : _RECORDSTATUS, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) + }) + _sym_db.RegisterMessage(RecordStatus) + + + _SPAN_INTRINSICSENTRY._options = None + _SPAN_USERATTRIBUTESENTRY._options = None + _SPAN_AGENTATTRIBUTESENTRY._options = None + + _INGESTSERVICE = _descriptor.ServiceDescriptor( + name='IngestService', + full_name='com.newrelic.trace.v1.IngestService', + file=DESCRIPTOR, + index=0, + serialized_options=None, + serialized_start=727, + serialized_end=828, + methods=[ + _descriptor.MethodDescriptor( + name='RecordSpan', + full_name='com.newrelic.trace.v1.IngestService.RecordSpan', + index=0, + containing_service=None, + input_type=_SPAN, + output_type=_RECORDSTATUS, + serialized_options=None, + ), + ]) + _sym_db.RegisterServiceDescriptor(_INGESTSERVICE) + + DESCRIPTOR.services_by_name['IngestService'] = _INGESTSERVICE + + # @@protoc_insertion_point(module_scope) + diff --git a/newrelic/core/infinite_tracing_v4_pb2.py b/newrelic/core/infinite_tracing_v4_pb2.py new file mode 100644 index 000000000..ae1739670 --- /dev/null +++ b/newrelic/core/infinite_tracing_v4_pb2.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: v1.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x08v1.proto\x12\x15\x63om.newrelic.trace.v1\"7\n\tSpanBatch\x12*\n\x05spans\x18\x01 \x03(\x0b\x32\x1b.com.newrelic.trace.v1.Span\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\xc5\x01\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x12^\n\x0fRecordSpanBatch\x12 .com.newrelic.trace.v1.SpanBatch\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'v1_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _SPAN_INTRINSICSENTRY._options = None + _SPAN_INTRINSICSENTRY._serialized_options = b'8\001' + _SPAN_USERATTRIBUTESENTRY._options = None + _SPAN_USERATTRIBUTESENTRY._serialized_options = b'8\001' + _SPAN_AGENTATTRIBUTESENTRY._options = None + _SPAN_AGENTATTRIBUTESENTRY._serialized_options = b'8\001' + _SPANBATCH._serialized_start=35 + _SPANBATCH._serialized_end=90 + _SPAN._serialized_start=93 + _SPAN._serialized_end=611 + _SPAN_INTRINSICSENTRY._serialized_start=334 + _SPAN_INTRINSICSENTRY._serialized_end=422 + _SPAN_USERATTRIBUTESENTRY._serialized_start=424 + _SPAN_USERATTRIBUTESENTRY._serialized_end=516 + _SPAN_AGENTATTRIBUTESENTRY._serialized_start=518 + _SPAN_AGENTATTRIBUTESENTRY._serialized_end=611 + _ATTRIBUTEVALUE._serialized_start=613 + _ATTRIBUTEVALUE._serialized_end=729 + _RECORDSTATUS._serialized_start=731 + _RECORDSTATUS._serialized_end=768 + _INGESTSERVICE._serialized_start=771 + _INGESTSERVICE._serialized_end=968 +# @@protoc_insertion_point(module_scope) diff --git a/newrelic/core/message_node.py b/newrelic/core/message_node.py index 8c0a334b7..02e431eb3 100644 --- a/newrelic/core/message_node.py +++ b/newrelic/core/message_node.py @@ -51,6 +51,12 @@ def time_metrics(self, stats, root, parent): yield TimeMetric(name=name, scope=root.path, duration=self.duration, exclusive=self.exclusive) + # Now for the children, if the trace is not 
terminal. + + for child in self.children: + for metric in child.time_metrics(stats, root, self): + yield metric + def trace_node(self, stats, root, connections): name = root.string_table.cache(self.name) diff --git a/newrelic/core/trace_cache.py b/newrelic/core/trace_cache.py index 4a087c4fb..1634d0d0b 100644 --- a/newrelic/core/trace_cache.py +++ b/newrelic/core/trace_cache.py @@ -197,7 +197,7 @@ def active_threads(self): debug = global_settings().debug if debug.enable_coroutine_profiling: - for thread_id, trace in self._cache.items(): + for thread_id, trace in list(self._cache.items()): transaction = trace.transaction if transaction and transaction._greenlet is not None: gr = transaction._greenlet() @@ -359,7 +359,7 @@ def record_event_loop_wait(self, start_time, end_time): task = getattr(transaction.root_span, "_task", None) loop = get_event_loop(task) - for trace in self._cache.values(): + for trace in list(self._cache.values()): if trace in seen: continue diff --git a/newrelic/hooks/adapter_daphne.py b/newrelic/hooks/adapter_daphne.py new file mode 100644 index 000000000..430d9c4b3 --- /dev/null +++ b/newrelic/hooks/adapter_daphne.py @@ -0,0 +1,33 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.api.asgi_application import ASGIApplicationWrapper + + +@property +def application(self): + return getattr(self, "_nr_application", vars(self).get("application", None)) + + +@application.setter +def application(self, value): + # Wrap app only once + if value and not getattr(value, "_nr_wrapped", False): + value = ASGIApplicationWrapper(value) + value._nr_wrapped = True + self._nr_application = value + + +def instrument_daphne_server(module): + module.Server.application = application diff --git a/newrelic/hooks/adapter_hypercorn.py b/newrelic/hooks/adapter_hypercorn.py new file mode 100644 index 000000000..f22dc74f1 --- /dev/null +++ b/newrelic/hooks/adapter_hypercorn.py @@ -0,0 +1,79 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
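The daphne hook above instruments the server without touching request handling: it swaps `Server.application` for a property whose setter wraps whatever ASGI app daphne assigns, with the `_nr_wrapped` flag guarding against double wrapping on reassignment. A minimal standalone sketch of that interception pattern follows; the `Server` class and `wrap` helper here are hypothetical stand-ins, not daphne's or the agent's API.

    class Server:
        pass

    def wrap(app):
        # Stand-in for ASGIApplicationWrapper: mark the app so the setter
        # can recognize an already-wrapped value later.
        def wrapper(*args, **kwargs):
            return app(*args, **kwargs)
        wrapper._nr_wrapped = True
        return wrapper

    @property
    def application(self):
        # Prefer the value stashed by the setter; fall back to anything
        # placed directly in the instance dict before instrumentation.
        return getattr(self, "_nr_application", vars(self).get("application", None))

    @application.setter
    def application(self, value):
        if value and not getattr(value, "_nr_wrapped", False):
            value = wrap(value)  # wrap only once
        self._nr_application = value

    Server.application = application  # assignments now route through the setter

    server = Server()
    server.application = lambda scope: None    # wrapped on assignment
    server.application = server.application   # already wrapped, stored as-is
    assert getattr(server.application, "_nr_wrapped", False)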
+ +from newrelic.api.asgi_application import ASGIApplicationWrapper +from newrelic.api.wsgi_application import WSGIApplicationWrapper +from newrelic.common.object_wrapper import wrap_function_wrapper + + +def bind_worker_serve(app, *args, **kwargs): + return app, args, kwargs + + +async def wrap_worker_serve(wrapped, instance, args, kwargs): + import hypercorn + + wrapper_module = getattr(hypercorn, "app_wrappers", None) + asgi_wrapper_class = getattr(wrapper_module, "ASGIWrapper", None) + wsgi_wrapper_class = getattr(wrapper_module, "WSGIWrapper", None) + + app, args, kwargs = bind_worker_serve(*args, **kwargs) + + # Hypercorn 0.14.1 introduced wrappers for ASGI and WSGI apps that need to be above our instrumentation. + if asgi_wrapper_class is not None and isinstance(app, asgi_wrapper_class): + app.app = ASGIApplicationWrapper(app.app) + elif wsgi_wrapper_class is not None and isinstance(app, wsgi_wrapper_class): + app.app = WSGIApplicationWrapper(app.app) + else: + app = ASGIApplicationWrapper(app) + + app._nr_wrapped = True + return await wrapped(app, *args, **kwargs) + + +def bind_is_asgi(app): + return app + + +def wrap_is_asgi(wrapped, instance, args, kwargs): + # Wrapper is identical and reused for the functions is_asgi and _is_asgi_2. + app = bind_is_asgi(*args, **kwargs) + + # Unwrap apps wrapped by our instrumentation. + # ASGI 2/3 detection for hypercorn is unable to process + # our wrappers and will return incorrect results. This + # should be sufficient to allow hypercorn to run detection + # on an application that was not wrapped by this instrumentation. + while getattr(app, "_nr_wrapped", False): + app = app.__wrapped__ + + return wrapped(app) + + +def instrument_hypercorn_asyncio_run(module): + if hasattr(module, "worker_serve"): + wrap_function_wrapper(module, "worker_serve", wrap_worker_serve) + + +def instrument_hypercorn_trio_run(module): + if hasattr(module, "worker_serve"): + wrap_function_wrapper(module, "worker_serve", wrap_worker_serve) + + +def instrument_hypercorn_utils(module): + if hasattr(module, "_is_asgi_2"): + wrap_function_wrapper(module, "_is_asgi_2", wrap_is_asgi) + + if hasattr(module, "is_asgi"): + wrap_function_wrapper(module, "is_asgi", wrap_is_asgi) diff --git a/newrelic/hooks/datastore_aioredis.py b/newrelic/hooks/datastore_aioredis.py index 13f08fd19..a2267960c 100644 --- a/newrelic/hooks/datastore_aioredis.py +++ b/newrelic/hooks/datastore_aioredis.py @@ -12,16 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
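The `wrap_is_asgi` wrapper above unwinds the agent's proxies before hypercorn's ASGI detection runs; as the comment in the hook notes, the detection cannot see through the wrappers. Signature-based detection of this kind typically fails because a generic wrapper is a plain function that hides the coroutine underneath. Below is a standalone sketch of the failure mode and the `__wrapped__` unwinding; the names are illustrative, not hypercorn internals.

    import inspect

    async def asgi_app(scope, receive, send):
        pass

    def instrument(app):
        def wrapper(*args, **kwargs):  # a plain function hides the coroutine
            return app(*args, **kwargs)
        wrapper._nr_wrapped = True
        wrapper.__wrapped__ = app
        return wrapper

    wrapped = instrument(asgi_app)
    print(inspect.iscoroutinefunction(asgi_app))  # True
    print(inspect.iscoroutinefunction(wrapped))   # False: detection would misfire

    app = wrapped
    while getattr(app, "_nr_wrapped", False):     # unwind to the original app
        app = app.__wrapped__
    print(inspect.iscoroutinefunction(app))       # True again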
-from newrelic.api.datastore_trace import DatastoreTrace
+from newrelic.api.datastore_trace import DatastoreTrace, DatastoreTraceWrapper
 from newrelic.api.time_trace import current_trace
 from newrelic.api.transaction import current_transaction
-from newrelic.common.object_wrapper import wrap_function_wrapper
+from newrelic.common.object_wrapper import wrap_function_wrapper, function_wrapper, FunctionWrapper
 from newrelic.hooks.datastore_redis import (
     _redis_client_methods,
     _redis_multipart_commands,
     _redis_operation_re,
 )
+from newrelic.common.async_wrapper import async_wrapper
+
+import aioredis
+
+try:
+    AIOREDIS_VERSION = tuple(int(x) for x in getattr(aioredis, "__version__").split("."))
+except Exception:
+    AIOREDIS_VERSION = (0, 0, 0)
+

 def _conn_attrs_to_dict(connection):
     host = getattr(connection, "host", None)
@@ -45,13 +54,36 @@ def _instance_info(kwargs):


 def _wrap_AioRedis_method_wrapper(module, instance_class_name, operation):
-    async def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs):
+
+    @function_wrapper
+    async def _nr_wrapper_AioRedis_async_method_(wrapped, instance, args, kwargs):
         transaction = current_transaction()
         if transaction is None:
             return await wrapped(*args, **kwargs)

         with DatastoreTrace(product="Redis", target=None, operation=operation):
             return await wrapped(*args, **kwargs)
+
+    def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs):
+        # Check for Redis transactions/pipelines and return early if found.
+        # In that case the method returns synchronously without executing;
+        # the command is added to the command stack and run later.
+        if AIOREDIS_VERSION < (2,):
+            # AioRedis v1 uses a RedisBuffer instead of a real connection for queueing up pipeline commands
+            from aioredis.commands.transaction import _RedisBuffer
+            if isinstance(instance._pool_or_conn, _RedisBuffer):
+                # The method will return synchronously without executing;
+                # it will be added to the command stack and run later.
+                return wrapped(*args, **kwargs)
+        else:
+            # AioRedis v2 uses a Pipeline object for a client and internally queues up pipeline commands
+            from aioredis.client import Pipeline
+            if isinstance(instance, Pipeline):
+                return wrapped(*args, **kwargs)
+
+        # The method should be run when awaited, so wrap it in an async wrapper.
+        return _nr_wrapper_AioRedis_async_method_(wrapped)(*args, **kwargs)
+
     name = "%s.%s" % (instance_class_name, operation)
     wrap_function_wrapper(module, name, _nr_wrapper_AioRedis_method_)
@@ -108,6 +140,58 @@ async def wrap_Connection_send_command(wrapped, instance, args, kwargs):
     return await wrapped(*args, **kwargs)


+def wrap_RedisConnection_execute(wrapped, instance, args, kwargs):
+    # RedisConnection in aioredis v1 returns a future instead of using coroutines
+    transaction = current_transaction()
+    if not transaction:
+        return wrapped(*args, **kwargs)
+
+    host, port_path_or_id, db = (None, None, None)
+
+    try:
+        dt = transaction.settings.datastore_tracer
+        if dt.instance_reporting.enabled or dt.database_name_reporting.enabled:
+            conn_kwargs = _conn_attrs_to_dict(instance)
+            host, port_path_or_id, db = _instance_info(conn_kwargs)
+    except Exception:
+        pass
+
+    # Multi-part commands (e.g. "CLIENT LIST") arrive split across
+    # execute()'s positional arguments, so we need to detect them and
+    # pull the subcommand out of the next argument.
+
+    operation = args[0].strip().lower()
+
+    # If it's not a multi-part command, there's no need to trace it, so
+    # we can return early.
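+    # (commands issued through the wrapped client methods are already inside
+    # a DatastoreTrace; in that case execute() only needs to locate the open
+    # trace below and attach the instance information to it)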
+ + if operation.split()[0] not in _redis_multipart_commands: # Set the datastore info on the DatastoreTrace containing this function call. + trace = current_trace() + + # Find DatastoreTrace no matter how many other traces are inbetween + while trace is not None and not isinstance(trace, DatastoreTrace): + trace = getattr(trace, "parent", None) + + if trace is not None: + trace.host = host + trace.port_path_or_id = port_path_or_id + trace.database_name = db + + return wrapped(*args, **kwargs) + + # Convert multi args to single arg string + + if operation in _redis_multipart_commands and len(args) > 1: + operation = "%s %s" % (operation, args[1].strip().lower()) + + operation = _redis_operation_re.sub("_", operation) + + with DatastoreTrace( + product="Redis", target=None, operation=operation, host=host, port_path_or_id=port_path_or_id, database_name=db + ): + return wrapped(*args, **kwargs) + + def instrument_aioredis_client(module): # StrictRedis is just an alias of Redis, no need to wrap it as well. if hasattr(module, "Redis"): @@ -124,4 +208,4 @@ def instrument_aioredis_connection(module): if hasattr(module, "RedisConnection"): if hasattr(module.RedisConnection, "execute"): - wrap_function_wrapper(module, "RedisConnection.execute", wrap_Connection_send_command) \ No newline at end of file + wrap_function_wrapper(module, "RedisConnection.execute", wrap_RedisConnection_execute) diff --git a/newrelic/hooks/framework_sanic.py b/newrelic/hooks/framework_sanic.py index aabeb9b57..745cdbf70 100644 --- a/newrelic/hooks/framework_sanic.py +++ b/newrelic/hooks/framework_sanic.py @@ -15,13 +15,12 @@ import sys from inspect import isawaitable -from newrelic.api.web_transaction import web_transaction -from newrelic.api.transaction import current_transaction -from newrelic.api.function_trace import function_trace, FunctionTrace +from newrelic.api.function_trace import FunctionTrace, function_trace from newrelic.api.time_trace import notice_error -from newrelic.common.object_wrapper import (wrap_function_wrapper, - function_wrapper) +from newrelic.api.transaction import current_transaction +from newrelic.api.web_transaction import web_transaction from newrelic.common.object_names import callable_name +from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper def _bind_add(uri, methods, handler, *args, **kwargs): @@ -36,19 +35,20 @@ def _nr_wrapper_handler_(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) name = callable_name(wrapped) - view_class = getattr(wrapped, 'view_class', None) + view_class = getattr(wrapped, "view_class", None) view = view_class or wrapped if view_class: try: method = args[0].method.lower() - name = callable_name(view_class) + '.' + method + name = callable_name(view_class) + "." 
+ method view = getattr(view_class, method) except: pass - + transaction.set_transaction_name(name, priority=3) import sanic - transaction.add_framework_info(name='Sanic', version=sanic.__version__) + + transaction.add_framework_info(name="Sanic", version=sanic.__version__) with FunctionTrace(name=name, source=view): return wrapped(*args, **kwargs) @@ -60,7 +60,7 @@ def _nr_sanic_router_add(wrapped, instance, args, kwargs): # Cache the callable_name on the handler object callable_name(handler) - if hasattr(wrapped, 'view_class'): + if hasattr(wrapped, "view_class"): callable_name(wrapped.view_class) wrapped_handler = _nr_wrapper_handler_(handler) @@ -131,7 +131,7 @@ def error_response(wrapped, instance, args, kwargs): raise else: # response can be a response object or a coroutine - if hasattr(response, 'status'): + if hasattr(response, "status"): notice_error(error=exc_info, status_code=response.status) else: notice_error(exc_info) @@ -144,18 +144,16 @@ def error_response(wrapped, instance, args, kwargs): def _sanic_app_init(wrapped, instance, args, kwargs): result = wrapped(*args, **kwargs) - error_handler = getattr(instance, 'error_handler') - if hasattr(error_handler, 'response'): - instance.error_handler.response = error_response( - error_handler.response) - if hasattr(error_handler, 'add'): - error_handler.add = _nr_sanic_error_handlers( - error_handler.add) + error_handler = getattr(instance, "error_handler") + if hasattr(error_handler, "response"): + instance.error_handler.response = error_response(error_handler.response) + if hasattr(error_handler, "add"): + error_handler.add = _nr_sanic_error_handlers(error_handler.add) - router = getattr(instance, 'router') - if hasattr(router, 'add'): + router = getattr(instance, "router") + if hasattr(router, "add"): router.add = _nr_sanic_router_add(router.add) - if hasattr(router, 'get'): + if hasattr(router, "get"): # Cache the callable_name on the router.get callable_name(router.get) router.get = _nr_sanic_router_get(router.get) @@ -172,8 +170,7 @@ def _nr_sanic_response_get_headers(wrapped, instance, args, kwargs): return result # instance is the response object - cat_headers = transaction.process_response(str(instance.status), - instance.headers.items()) + cat_headers = transaction.process_response(str(instance.status), instance.headers.items()) for header_name, header_value in cat_headers: if header_name not in instance.headers: @@ -189,11 +186,10 @@ async def _nr_sanic_response_send(wrapped, instance, args, kwargs): await result if transaction is None: - return wrapped(*args, **kwargs) + return result # instance is the response object - cat_headers = transaction.process_response(str(instance.status), - instance.headers.items()) + cat_headers = transaction.process_response(str(instance.status), instance.headers.items()) for header_name, header_value in cat_headers: if header_name not in instance.headers: @@ -201,6 +197,7 @@ async def _nr_sanic_response_send(wrapped, instance, args, kwargs): return result + def _nr_sanic_response_parse_headers(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -208,8 +205,7 @@ def _nr_sanic_response_parse_headers(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) # instance is the response object - cat_headers = transaction.process_response(str(instance.status), - instance.headers.items()) + cat_headers = transaction.process_response(str(instance.status), instance.headers.items()) for header_name, header_value in cat_headers: if header_name not in instance.headers: @@ 
-219,7 +215,7 @@ def _nr_sanic_response_parse_headers(wrapped, instance, args, kwargs): def _nr_wrapper_middleware_(attach_to): - is_request_middleware = attach_to == 'request' + is_request_middleware = attach_to == "request" @function_wrapper def _wrapper(wrapped, instance, args, kwargs): @@ -238,7 +234,7 @@ def _wrapper(wrapped, instance, args, kwargs): return _wrapper -def _bind_middleware(middleware, attach_to='request', *args, **kwargs): +def _bind_middleware(middleware, attach_to="request", *args, **kwargs): return middleware, attach_to @@ -247,7 +243,12 @@ def _nr_sanic_register_middleware_(wrapped, instance, args, kwargs): # Cache the callable_name on the middleware object callable_name(middleware) - wrapped_middleware = _nr_wrapper_middleware_(attach_to)(middleware) + middleware_func = middleware + if hasattr(middleware, "func"): + name = callable_name(middleware.func) + middleware_func = middleware.func + + wrapped_middleware = _nr_wrapper_middleware_(attach_to)(middleware_func) wrapped(wrapped_middleware, attach_to) return middleware @@ -259,36 +260,55 @@ def _bind_request(request, *args, **kwargs): def _nr_sanic_transaction_wrapper_(wrapped, instance, args, kwargs): request = _bind_request(*args, **kwargs) # If the request is a websocket request do not wrap it - if request.headers.get('upgrade', '').lower() == 'websocket': + if request.headers.get("upgrade", "").lower() == "websocket": return wrapped(*args, **kwargs) return web_transaction( request_method=request.method, request_path=request.path, query_string=request.query_string, - headers=request.headers)(wrapped)(*args, **kwargs) + headers=request.headers, + )(wrapped)(*args, **kwargs) + + +def _nr_wrap_touchup_run(wrapped, instance, args, kwargs): + # TouchUp uses metaprogramming to rewrite methods of classes on startup. + # To properly wrap them we need to catch the call to TouchUp.run and + # reinstrument any methods that were replaced with uninstrumented versions. + + result = wrapped(*args, **kwargs) + + if "sanic.app" in sys.modules: + module = sys.modules["sanic.app"] + target = args[0] + + if isinstance(target, module.Sanic): + # Reinstrument class after metaclass "TouchUp" has finished rewriting methods on the class. 
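+            # (wrapt wrappers expose the original function via "__wrapped__";
+            # if that attribute is missing, TouchUp has swapped in the raw,
+            # uninstrumented handle_request and it must be wrapped again)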
+ target_cls = module.Sanic + if hasattr(target_cls, "handle_request") and not hasattr(target_cls.handle_request, "__wrapped__"): + wrap_function_wrapper(module, "Sanic.handle_request", _nr_sanic_transaction_wrapper_) + + return result def instrument_sanic_app(module): - wrap_function_wrapper(module, 'Sanic.handle_request', - _nr_sanic_transaction_wrapper_) - wrap_function_wrapper(module, 'Sanic.__init__', - _sanic_app_init) - wrap_function_wrapper(module, 'Sanic.register_middleware', - _nr_sanic_register_middleware_) - if hasattr(module.Sanic, 'register_named_middleware'): - wrap_function_wrapper(module, 'Sanic.register_named_middleware', - _nr_sanic_register_middleware_) + wrap_function_wrapper(module, "Sanic.handle_request", _nr_sanic_transaction_wrapper_) + wrap_function_wrapper(module, "Sanic.__init__", _sanic_app_init) + wrap_function_wrapper(module, "Sanic.register_middleware", _nr_sanic_register_middleware_) + if hasattr(module.Sanic, "register_named_middleware"): + wrap_function_wrapper(module, "Sanic.register_named_middleware", _nr_sanic_register_middleware_) def instrument_sanic_response(module): - if hasattr(module.BaseHTTPResponse, 'send'): - wrap_function_wrapper(module, 'BaseHTTPResponse.send', - _nr_sanic_response_send) + if hasattr(module.BaseHTTPResponse, "send"): + wrap_function_wrapper(module, "BaseHTTPResponse.send", _nr_sanic_response_send) else: - if hasattr(module.BaseHTTPResponse, 'get_headers'): - wrap_function_wrapper(module, 'BaseHTTPResponse.get_headers', - _nr_sanic_response_get_headers) - if hasattr(module.BaseHTTPResponse, '_parse_headers'): - wrap_function_wrapper(module, 'BaseHTTPResponse._parse_headers', - _nr_sanic_response_parse_headers) + if hasattr(module.BaseHTTPResponse, "get_headers"): + wrap_function_wrapper(module, "BaseHTTPResponse.get_headers", _nr_sanic_response_get_headers) + if hasattr(module.BaseHTTPResponse, "_parse_headers"): + wrap_function_wrapper(module, "BaseHTTPResponse._parse_headers", _nr_sanic_response_parse_headers) + + +def instrument_sanic_touchup_service(module): + if hasattr(module, "TouchUp") and hasattr(module.TouchUp, "run"): + wrap_function_wrapper(module.TouchUp, "run", _nr_wrap_touchup_run) diff --git a/newrelic/hooks/logger_loguru.py b/newrelic/hooks/logger_loguru.py index 65eadb1c4..801a1c8cd 100644 --- a/newrelic/hooks/logger_loguru.py +++ b/newrelic/hooks/logger_loguru.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import sys from newrelic.api.application import application_instance from newrelic.api.transaction import current_transaction, record_log_event @@ -22,6 +23,7 @@ from newrelic.packages import six _logger = logging.getLogger(__name__) +is_pypy = hasattr(sys, "pypy_version_info") def loguru_version(): from loguru import __version__ @@ -71,6 +73,16 @@ def wrap_log(wrapped, instance, args, kwargs): try: level_id, static_level_no, from_decorator, options, message, subargs, subkwargs = bind_log(*args, **kwargs) options[-2] = nr_log_patcher(options[-2]) + # Loguru looks into the stack trace to find the caller's module and function names. + # options[1] tells loguru how far up to look in the stack trace to find the caller. + # Because wrap_log is an extra call in the stack trace, loguru needs to look 1 level higher. + if not is_pypy: + options[1] += 1 + else: + # PyPy inspection requires an additional frame of offset, as the wrapt internals seem to + # add another frame on PyPy but not on CPython. 
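+                # (i.e. skip the extra wrapt frame in addition to wrap_log itself)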
+ options[1] += 2 + except Exception as e: _logger.debug("Exception in loguru handling: %s" % str(e)) return wrapped(*args, **kwargs) diff --git a/newrelic/hooks/messagebroker_confluentkafka.py b/newrelic/hooks/messagebroker_confluentkafka.py new file mode 100644 index 000000000..965fd765b --- /dev/null +++ b/newrelic/hooks/messagebroker_confluentkafka.py @@ -0,0 +1,242 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +import sys + +from newrelic.api.application import application_instance +from newrelic.api.error_trace import wrap_error_trace +from newrelic.api.function_trace import FunctionTraceWrapper +from newrelic.api.message_trace import MessageTrace +from newrelic.api.message_transaction import MessageTransaction +from newrelic.api.time_trace import notice_error +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper + +_logger = logging.getLogger(__name__) + +HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" +HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" +HEARTBEAT_FAIL = "MessageBroker/Kafka/Heartbeat/Fail" +HEARTBEAT_RECEIVE = "MessageBroker/Kafka/Heartbeat/Receive" +HEARTBEAT_SESSION_TIMEOUT = "MessageBroker/Kafka/Heartbeat/SessionTimeout" +HEARTBEAT_POLL_TIMEOUT = "MessageBroker/Kafka/Heartbeat/PollTimeout" + + +def wrap_Producer_produce(wrapped, instance, args, kwargs): + transaction = current_transaction() + if transaction is None: + return wrapped(*args, **kwargs) + + # Binding with a standard function signature does not work properly due to a bug in handling arguments + # in the underlying C code, where callback=None being specified causes on_delivery=callback to never run. + + # Bind out headers from end of args list + if len(args) == 8: + # Take headers off the end of the positional args + headers = args[7] + args = args[0:7] + else: + headers = kwargs.pop("headers", []) + + # Bind topic off of the beginning of the args list + if len(args) >= 1: + topic = args[0] + args = args[1:] + else: + topic = kwargs.get("topic", None) + + with MessageTrace( + library="Kafka", + operation="Produce", + destination_type="Topic", + destination_name=topic or "Default", + source=wrapped, + ) as trace: + dt_headers = {k: v.encode("utf-8") for k, v in trace.generate_request_headers(transaction)} + # headers can be a list of tuples or a dict so convert to dict for consistency. + dt_headers.update(dict(headers) if headers else {}) + try: + return wrapped(topic, headers=dt_headers, *args, **kwargs) + except Exception as error: + # Unwrap kafka errors + while hasattr(error, "exception"): + error = error.exception # pylint: disable=E1101 + + _, _, tb = sys.exc_info() + notice_error((type(error), error, tb)) + tb = None # Clear reference to prevent reference cycles + raise + + +def wrap_Consumer_poll(wrapped, instance, args, kwargs): + # This wrapper can be called either outside of a transaction, or + # within the context of an existing transaction. 
There are 4 + # possibilities we need to handle: (Note that this is similar to + # our Pika, Celery, and Kafka-Python instrumentation) + # + # 1. Inside an inner wrapper in the DeserializingConsumer + # + # Do nothing. The DeserializingConsumer is double wrapped because + # the underlying C implementation is wrapped as well. We need to + # detect when the second wrapper is called and ignore it completely + # or transactions will be stopped early. + # + # 2. In an inactive transaction + # + # If the end_of_transaction() or ignore_transaction() API + # calls have been invoked, this iterator may be called in the + # context of an inactive transaction. In this case, don't wrap + # the iterator in any way. Just run the original iterator. + # + # 3. In an active transaction + # + # Do nothing. + # + # 4. Outside of a transaction + # + # Since it's not running inside of an existing transaction, we + # want to create a new background transaction for it. + + # Step 1: Stop existing transactions + if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: + instance._nr_transaction.__exit__(*sys.exc_info()) + + # Step 2: Poll for records + try: + record = wrapped(*args, **kwargs) + except Exception as e: + if current_transaction(): + notice_error() + else: + notice_error(application=application_instance(activate=False)) + raise + + # Step 3: Start new transaction for received record + if record: + library = "Kafka" + destination_type = "Topic" + destination_name = record.topic() + received_bytes = len(str(record.value()).encode("utf-8")) + message_count = 1 + + headers = record.headers() + headers = dict(headers) if headers else {} + + transaction = current_transaction(active_only=False) + if not transaction: + transaction = MessageTransaction( + application=application_instance(), + library=library, + destination_type=destination_type, + destination_name=destination_name, + headers=headers, + transport_type="Kafka", + routing_key=record.key(), + source=wrapped, + ) + instance._nr_transaction = transaction + transaction.__enter__() # pylint: disable=C2801 + + transaction._add_agent_attribute("kafka.consume.byteCount", received_bytes) + + transaction = current_transaction() + + if transaction: # If there is an active transaction now. + # Add metrics whether or not a transaction was already active, or one was just started. + # Don't add metrics if there was an inactive transaction. + # Name the metrics using the same format as the transaction, but in case the active transaction + # was an existing one and not a message transaction, reproduce the naming logic here. 
+ group = "Message/%s/%s" % (library, destination_type) + name = "Named/%s" % destination_name + transaction.record_custom_metric("%s/%s/Received/Bytes" % (group, name), received_bytes) + transaction.record_custom_metric("%s/%s/Received/Messages" % (group, name), message_count) + + return record + + +def wrap_DeserializingConsumer_poll(wrapped, instance, args, kwargs): + try: + return wrapped(*args, **kwargs) + except Exception: + notice_error() + + # Stop existing transactions + if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: + instance._nr_transaction.__exit__(*sys.exc_info()) + + raise + + +def wrap_serializer(serializer_name, group_prefix): + @function_wrapper + def _wrap_serializer(wrapped, instance, args, kwargs): + if not current_transaction(): + return wrapped(*args, **kwargs) + + topic = args[1].topic + group = "%s/Kafka/Topic" % group_prefix + name = "Named/%s/%s" % (topic, serializer_name) + + return FunctionTraceWrapper(wrapped, name=name, group=group)(*args, **kwargs) + + return _wrap_serializer + + +def wrap_SerializingProducer_init(wrapped, instance, args, kwargs): + wrapped(*args, **kwargs) + + if hasattr(instance, "_key_serializer") and callable(instance._key_serializer): + instance._key_serializer = wrap_serializer("Serialization/Key", "MessageBroker")(instance._key_serializer) + + if hasattr(instance, "_value_serializer") and callable(instance._value_serializer): + instance._value_serializer = wrap_serializer("Serialization/Value", "MessageBroker")(instance._value_serializer) + + +def wrap_DeserializingConsumer_init(wrapped, instance, args, kwargs): + wrapped(*args, **kwargs) + + if hasattr(instance, "_key_deserializer") and callable(instance._key_deserializer): + instance._key_deserializer = wrap_serializer("Deserialization/Key", "Message")(instance._key_deserializer) + + if hasattr(instance, "_value_deserializer") and callable(instance._value_deserializer): + instance._value_deserializer = wrap_serializer("Deserialization/Value", "Message")(instance._value_deserializer) + + +def wrap_immutable_class(module, class_name): + # Wrap immutable binary extension class with a mutable Python subclass + new_class = type(class_name, (getattr(module, class_name),), {}) + setattr(module, class_name, new_class) + return new_class + + +def instrument_confluentkafka_cimpl(module): + if hasattr(module, "Producer"): + wrap_immutable_class(module, "Producer") + wrap_function_wrapper(module, "Producer.produce", wrap_Producer_produce) + + if hasattr(module, "Consumer"): + wrap_immutable_class(module, "Consumer") + wrap_function_wrapper(module, "Consumer.poll", wrap_Consumer_poll) + + +def instrument_confluentkafka_serializing_producer(module): + if hasattr(module, "SerializingProducer"): + wrap_function_wrapper(module, "SerializingProducer.__init__", wrap_SerializingProducer_init) + wrap_error_trace(module, "SerializingProducer.produce") + + +def instrument_confluentkafka_deserializing_consumer(module): + if hasattr(module, "DeserializingConsumer"): + wrap_function_wrapper(module, "DeserializingConsumer.__init__", wrap_DeserializingConsumer_init) + wrap_function_wrapper(module, "DeserializingConsumer.poll", wrap_DeserializingConsumer_poll) diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py new file mode 100644 index 000000000..697b46349 --- /dev/null +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -0,0 +1,272 @@ +# Copyright 2010 New Relic, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sys + +from kafka.serializer import Serializer + +from newrelic.api.application import application_instance +from newrelic.api.function_trace import FunctionTraceWrapper +from newrelic.api.message_trace import MessageTrace +from newrelic.api.message_transaction import MessageTransaction +from newrelic.api.time_trace import current_trace, notice_error +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import ( + ObjectProxy, + function_wrapper, + wrap_function_wrapper, +) + +HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" +HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" +HEARTBEAT_FAIL = "MessageBroker/Kafka/Heartbeat/Fail" +HEARTBEAT_RECEIVE = "MessageBroker/Kafka/Heartbeat/Receive" +HEARTBEAT_SESSION_TIMEOUT = "MessageBroker/Kafka/Heartbeat/SessionTimeout" +HEARTBEAT_POLL_TIMEOUT = "MessageBroker/Kafka/Heartbeat/PollTimeout" + + +def _bind_send(topic, value=None, key=None, headers=None, partition=None, timestamp_ms=None): + return topic, value, key, headers, partition, timestamp_ms + + +def wrap_KafkaProducer_send(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + topic, value, key, headers, partition, timestamp_ms = _bind_send(*args, **kwargs) + headers = list(headers) if headers else [] + + with MessageTrace( + library="Kafka", + operation="Produce", + destination_type="Topic", + destination_name=topic or "Default", + source=wrapped, + terminal=False, + ) as trace: + dt_headers = [(k, v.encode("utf-8")) for k, v in trace.generate_request_headers(transaction)] + headers.extend(dt_headers) + try: + return wrapped(topic, value=value, key=key, headers=headers, partition=partition, timestamp_ms=timestamp_ms) + except Exception: + notice_error() + raise + + +def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): + if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: + instance._nr_transaction.__exit__(*sys.exc_info()) + + try: + record = wrapped(*args, **kwargs) + except Exception as e: + # StopIteration is an expected error, indicating the end of an iterable, + # that should not be captured. + if not isinstance(e, StopIteration): + if current_transaction(): + # Report error on existing transaction if there is one + notice_error() + else: + # Report error on application + notice_error(application=application_instance(activate=False)) + raise + + if record: + # This iterator can be called either outside of a transaction, or + # within the context of an existing transaction. There are 3 + # possibilities we need to handle: (Note that this is similar to + # our Pika and Celery instrumentation) + # + # 1. In an inactive transaction + # + # If the end_of_transaction() or ignore_transaction() API + # calls have been invoked, this iterator may be called in the + # context of an inactive transaction. In this case, don't wrap + # the iterator in any way. 
Just run the original iterator. + # + # 2. In an active transaction + # + # Do nothing. + # + # 3. Outside of a transaction + # + # Since it's not running inside of an existing transaction, we + # want to create a new background transaction for it. + + library = "Kafka" + destination_type = "Topic" + destination_name = record.topic + received_bytes = len(str(record.value).encode("utf-8")) + message_count = 1 + + transaction = current_transaction(active_only=False) + if not transaction: + transaction = MessageTransaction( + application=application_instance(), + library=library, + destination_type=destination_type, + destination_name=destination_name, + headers=dict(record.headers), + transport_type="Kafka", + routing_key=record.key, + source=wrapped, + ) + instance._nr_transaction = transaction + transaction.__enter__() + + # Obtain consumer client_id to send up as agent attribute + if hasattr(instance, "config") and "client_id" in instance.config: + client_id = instance.config["client_id"] + transaction._add_agent_attribute("kafka.consume.client_id", client_id) + + transaction._add_agent_attribute("kafka.consume.byteCount", received_bytes) + + transaction = current_transaction() + if transaction: # If there is an active transaction now. + # Add metrics whether or not a transaction was already active, or one was just started. + # Don't add metrics if there was an inactive transaction. + # Name the metrics using the same format as the transaction, but in case the active transaction + # was an existing one and not a message transaction, reproduce the naming logic here. + group = "Message/%s/%s" % (library, destination_type) + name = "Named/%s" % destination_name + transaction.record_custom_metric("%s/%s/Received/Bytes" % (group, name), received_bytes) + transaction.record_custom_metric("%s/%s/Received/Messages" % (group, name), message_count) + + return record + + +def wrap_KafkaProducer_init(wrapped, instance, args, kwargs): + get_config_key = lambda key: kwargs.get(key, instance.DEFAULT_CONFIG[key]) # noqa: E731 + + kwargs["key_serializer"] = wrap_serializer( + instance, "Serialization/Key", "MessageBroker", get_config_key("key_serializer") + ) + kwargs["value_serializer"] = wrap_serializer( + instance, "Serialization/Value", "MessageBroker", get_config_key("value_serializer") + ) + + return wrapped(*args, **kwargs) + + +class NewRelicSerializerWrapper(ObjectProxy): + def __init__(self, wrapped, serializer_name, group_prefix): + ObjectProxy.__init__.__get__(self)(wrapped) + + self._nr_serializer_name = serializer_name + self._nr_group_prefix = group_prefix + + def serialize(self, topic, object): + wrapped = self.__wrapped__.serialize + args = (topic, object) + kwargs = {} + + if not current_transaction(): + return wrapped(*args, **kwargs) + + group = "%s/Kafka/Topic" % self._nr_group_prefix + name = "Named/%s/%s" % (topic, self._nr_serializer_name) + + return FunctionTraceWrapper(wrapped, name=name, group=group)(*args, **kwargs) + + +def wrap_serializer(client, serializer_name, group_prefix, serializer): + @function_wrapper + def _wrap_serializer(wrapped, instance, args, kwargs): + transaction = current_transaction() + if not transaction: + return wrapped(*args, **kwargs) + + topic = "Unknown" + if isinstance(transaction, MessageTransaction): + topic = transaction.destination_name + else: + # Find parent message trace to retrieve topic + message_trace = current_trace() + while message_trace is not None and not isinstance(message_trace, MessageTrace): + message_trace = message_trace.parent + 
+            if message_trace:
+                topic = message_trace.destination_name
+
+        group = "%s/Kafka/Topic" % group_prefix
+        name = "Named/%s/%s" % (topic, serializer_name)
+
+        return FunctionTraceWrapper(wrapped, name=name, group=group)(*args, **kwargs)
+
+    try:
+        # Apply wrapper to serializer
+        if serializer is None:
+            # Do nothing
+            return serializer
+        elif isinstance(serializer, Serializer):
+            return NewRelicSerializerWrapper(serializer, group_prefix=group_prefix, serializer_name=serializer_name)
+        else:
+            # Wrap callable in wrapper
+            return _wrap_serializer(serializer)
+    except Exception:
+        return serializer  # Avoid crashes from immutable serializers
+
+
+def metric_wrapper(metric_name, check_result=False):
+    def _metric_wrapper(wrapped, instance, args, kwargs):
+        result = wrapped(*args, **kwargs)
+
+        application = application_instance(activate=False)
+        if application:
+            if not check_result or (check_result and result):
+                # If the result does not need to be validated, send the metric.
+                # If the result does need to be validated, ensure it is True before sending.
+                application.record_custom_metric(metric_name, 1)
+
+        return result
+
+    return _metric_wrapper
+
+
+def instrument_kafka_producer(module):
+    if hasattr(module, "KafkaProducer"):
+        wrap_function_wrapper(module, "KafkaProducer.__init__", wrap_KafkaProducer_init)
+        wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send)
+
+
+def instrument_kafka_consumer_group(module):
+    if hasattr(module, "KafkaConsumer"):
+        wrap_function_wrapper(module, "KafkaConsumer.__next__", wrap_kafkaconsumer_next)
+
+
+def instrument_kafka_heartbeat(module):
+    if hasattr(module, "Heartbeat"):
+        if hasattr(module.Heartbeat, "poll"):
+            wrap_function_wrapper(module, "Heartbeat.poll", metric_wrapper(HEARTBEAT_POLL))
+
+        if hasattr(module.Heartbeat, "fail_heartbeat"):
+            wrap_function_wrapper(module, "Heartbeat.fail_heartbeat", metric_wrapper(HEARTBEAT_FAIL))
+
+        if hasattr(module.Heartbeat, "sent_heartbeat"):
+            wrap_function_wrapper(module, "Heartbeat.sent_heartbeat", metric_wrapper(HEARTBEAT_SENT))
+
+        if hasattr(module.Heartbeat, "received_heartbeat"):
+            wrap_function_wrapper(module, "Heartbeat.received_heartbeat", metric_wrapper(HEARTBEAT_RECEIVE))
+
+        if hasattr(module.Heartbeat, "session_timeout_expired"):
+            wrap_function_wrapper(
+                module,
+                "Heartbeat.session_timeout_expired",
+                metric_wrapper(HEARTBEAT_SESSION_TIMEOUT, check_result=True),
+            )
+
+        if hasattr(module.Heartbeat, "poll_timeout_expired"):
+            wrap_function_wrapper(
+                module, "Heartbeat.poll_timeout_expired", metric_wrapper(HEARTBEAT_POLL_TIMEOUT, check_result=True)
+            )
diff --git a/newrelic/packages/wrapt/__init__.py b/newrelic/packages/wrapt/__init__.py
index 7be739bf6..ee6539b77 100644
--- a/newrelic/packages/wrapt/__init__.py
+++ b/newrelic/packages/wrapt/__init__.py
@@ -1,4 +1,4 @@
-__version_info__ = ('1', '12', '1')
+__version_info__ = ('1', '14', '1')
 __version__ = '.'.join(__version_info__)
 
 from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper,
@@ -13,4 +13,15 @@ from .importer import (register_post_import_hook, when_imported,
         notify_module_loaded, discover_post_import_hooks)
 
+# Import of inspect.getcallargs() included for backward compatibility. An
+# implementation of this was previously bundled and made available here for
+# Python <2.7. Avoid using this in the future.
+
 from inspect import getcallargs
+
+# Variant of inspect.formatargspec() included here for forward compatibility.
+# This is being done because Python 3.11 dropped inspect.formatargspec() but +# code for handling signature changing decorators relied on it. Exposing the +# bundled implementation here in case any user of wrapt was also needing it. + +from .arguments import formatargspec diff --git a/newrelic/packages/wrapt/_wrappers.c b/newrelic/packages/wrapt/_wrappers.c index 660ad6b3b..67c5d5e1a 100644 --- a/newrelic/packages/wrapt/_wrappers.c +++ b/newrelic/packages/wrapt/_wrappers.c @@ -1961,13 +1961,13 @@ static int WraptPartialCallableObjectProxy_init( if (!PyObject_Length(args)) { PyErr_SetString(PyExc_TypeError, - "__init__ of partial needs an argument"); + "__init__ of partial needs an argument"); return -1; } if (PyObject_Length(args) < 1) { PyErr_SetString(PyExc_TypeError, - "partial type takes at least one argument"); + "partial type takes at least one argument"); return -1; } @@ -1975,7 +1975,7 @@ static int WraptPartialCallableObjectProxy_init( if (!PyCallable_Check(wrapped)) { PyErr_SetString(PyExc_TypeError, - "the first argument must be callable"); + "the first argument must be callable"); return -1; } @@ -1985,7 +1985,7 @@ static int WraptPartialCallableObjectProxy_init( return -1; result = WraptPartialCallableObjectProxy_raw_init(self, wrapped, - fnargs, kwds); + fnargs, kwds); Py_DECREF(fnargs); @@ -2299,12 +2299,15 @@ static PyObject *WraptFunctionWrapperBase_call( PyObject *result = NULL; static PyObject *function_str = NULL; + static PyObject *classmethod_str = NULL; if (!function_str) { #if PY_MAJOR_VERSION >= 3 function_str = PyUnicode_InternFromString("function"); + classmethod_str = PyUnicode_InternFromString("classmethod"); #else function_str = PyString_InternFromString("function"); + classmethod_str = PyString_InternFromString("classmethod"); #endif } @@ -2334,8 +2337,10 @@ static PyObject *WraptFunctionWrapperBase_call( kwds = param_kwds; } - if (self->instance == Py_None && (self->binding == function_str || + if ((self->instance == Py_None) && (self->binding == function_str || PyObject_RichCompareBool(self->binding, function_str, + Py_EQ) == 1 || self->binding == classmethod_str || + PyObject_RichCompareBool(self->binding, classmethod_str, Py_EQ) == 1)) { PyObject *instance = NULL; @@ -2510,6 +2515,101 @@ static PyObject *WraptFunctionWrapperBase_descr_get( /* ------------------------------------------------------------------------- */ +static PyObject *WraptFunctionWrapperBase_set_name( + WraptFunctionWrapperObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *method = NULL; + PyObject *result = NULL; + + if (!self->object_proxy.wrapped) { + PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized"); + return NULL; + } + + method = PyObject_GetAttrString(self->object_proxy.wrapped, + "__set_name__"); + + if (!method) { + PyErr_Clear(); + Py_INCREF(Py_None); + return Py_None; + } + + result = PyObject_Call(method, args, kwds); + + Py_DECREF(method); + + return result; +} + +/* ------------------------------------------------------------------------- */ + +static PyObject *WraptFunctionWrapperBase_instancecheck( + WraptFunctionWrapperObject *self, PyObject *instance) +{ + PyObject *object = NULL; + PyObject *result = NULL; + + int check = 0; + + if (!self->object_proxy.wrapped) { + PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized"); + return NULL; + } + + check = PyObject_IsInstance(instance, self->object_proxy.wrapped); + + if (check < 0) { + return NULL; + } + + result = check ? 
Py_True : Py_False;
+
+    Py_INCREF(result);
+    return result;
+}
+
+/* ------------------------------------------------------------------------- */
+
+static PyObject *WraptFunctionWrapperBase_subclasscheck(
+        WraptFunctionWrapperObject *self, PyObject *args)
+{
+    PyObject *subclass = NULL;
+    PyObject *object = NULL;
+    PyObject *result = NULL;
+
+    int check = 0;
+
+    if (!self->object_proxy.wrapped) {
+        PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized");
+        return NULL;
+    }
+
+    if (!PyArg_ParseTuple(args, "O", &subclass))
+        return NULL;
+
+    object = PyObject_GetAttrString(subclass, "__wrapped__");
+
+    if (!object)
+        PyErr_Clear();
+
+    check = PyObject_IsSubclass(object ? object : subclass,
+            self->object_proxy.wrapped);
+
+    Py_XDECREF(object);
+
+    if (check == -1)
+        return NULL;
+
+    result = check ? Py_True : Py_False;
+
+    Py_INCREF(result);
+
+    return result;
+}
+
+/* ------------------------------------------------------------------------- */
+
 static PyObject *WraptFunctionWrapperBase_get_self_instance(
         WraptFunctionWrapperObject *self, void *closure)
 {
@@ -2580,6 +2680,18 @@ static PyObject *WraptFunctionWrapperBase_get_self_parent(
 
 /* ------------------------------------------------------------------------- */;
 
+static PyMethodDef WraptFunctionWrapperBase_methods[] = {
+    { "__set_name__", (PyCFunction)WraptFunctionWrapperBase_set_name,
+                    METH_VARARGS | METH_KEYWORDS, 0 },
+    { "__instancecheck__", (PyCFunction)WraptFunctionWrapperBase_instancecheck,
+                    METH_O, 0},
+    { "__subclasscheck__", (PyCFunction)WraptFunctionWrapperBase_subclasscheck,
+                    METH_VARARGS, 0 },
+    { NULL, NULL },
+};
+
+/* ------------------------------------------------------------------------- */;
+
 static PyGetSetDef WraptFunctionWrapperBase_getset[] = {
     { "__module__", (getter)WraptObjectProxy_get_module,
                     (setter)WraptObjectProxy_set_module, 0 },
@@ -2633,7 +2745,7 @@ PyTypeObject WraptFunctionWrapperBase_Type = {
     offsetof(WraptObjectProxyObject, weakreflist), /*tp_weaklistoffset*/
     0,                      /*tp_iter*/
     0,                      /*tp_iternext*/
-    0,                      /*tp_methods*/
+    WraptFunctionWrapperBase_methods, /*tp_methods*/
     0,                      /*tp_members*/
     WraptFunctionWrapperBase_getset, /*tp_getset*/
     0,                      /*tp_base*/
diff --git a/newrelic/packages/wrapt/arguments.py b/newrelic/packages/wrapt/arguments.py
new file mode 100644
index 000000000..032bc059e
--- /dev/null
+++ b/newrelic/packages/wrapt/arguments.py
@@ -0,0 +1,38 @@
+# The inspect.formatargspec() function was dropped in Python 3.11 but we still
+# need it when constructing signature-changing decorators based on the result
+# of inspect.getargspec() or inspect.getfullargspec(). The code here implements
+# inspect.formatargspec() based on Parameter and Signature from the inspect
+# module, which were added in Python 3.6. Thanks to Cyril Jouve for the
+# implementation.
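+#
+# Illustrative example (not part of the bundled module): the reconstruction
+# renders the same strings as the old inspect.formatargspec() did, e.g.
+#
+#   formatargspec(['a', 'b'], defaults=(1,), annotations={'b': int})
+#
+# returns "(a, b: int = 1)", because it builds inspect.Parameter objects
+# and formats them via str(inspect.Signature(...)).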
+ +try: + from inspect import Parameter, Signature +except ImportError: + from inspect import formatargspec +else: + def formatargspec(args, varargs=None, varkw=None, defaults=None, + kwonlyargs=(), kwonlydefaults={}, annotations={}): + if kwonlydefaults is None: + kwonlydefaults = {} + ndefaults = len(defaults) if defaults else 0 + parameters = [ + Parameter( + arg, + Parameter.POSITIONAL_OR_KEYWORD, + default=defaults[i] if i >= 0 else Parameter.empty, + annotation=annotations.get(arg, Parameter.empty), + ) for i, arg in enumerate(args, ndefaults - len(args)) + ] + if varargs: + parameters.append(Parameter(varargs, Parameter.VAR_POSITIONAL)) + parameters.extend( + Parameter( + kwonlyarg, + Parameter.KEYWORD_ONLY, + default=kwonlydefaults.get(kwonlyarg, Parameter.empty), + annotation=annotations.get(kwonlyarg, Parameter.empty), + ) for kwonlyarg in kwonlyargs + ) + if varkw: + parameters.append(Parameter(varkw, Parameter.VAR_KEYWORD)) + return_annotation = annotations.get('return', Signature.empty) + return str(Signature(parameters, return_annotation=return_annotation)) \ No newline at end of file diff --git a/newrelic/packages/wrapt/decorators.py b/newrelic/packages/wrapt/decorators.py index 506303d7a..c3f254729 100644 --- a/newrelic/packages/wrapt/decorators.py +++ b/newrelic/packages/wrapt/decorators.py @@ -31,10 +31,11 @@ def exec_(_code_, _globs_=None, _locs_=None): del builtins from functools import partial -from inspect import ismethod, isclass, formatargspec -from collections import namedtuple +from inspect import isclass from threading import Lock, RLock +from .arguments import formatargspec + try: from inspect import signature except ImportError: @@ -173,7 +174,7 @@ def __call__(self, wrapped): # function so the wrapper is effectively indistinguishable from the # original wrapped function. -def decorator(wrapper=None, enabled=None, adapter=None): +def decorator(wrapper=None, enabled=None, adapter=None, proxy=FunctionWrapper): # The decorator should be supplied with a single positional argument # which is the wrapper function to be used to implement the # decorator. This may be preceded by a step whereby the keyword @@ -183,7 +184,7 @@ def decorator(wrapper=None, enabled=None, adapter=None): # decorator. In that case parts of the function '__code__' and # '__defaults__' attributes are used from the adapter function # rather than those of the wrapped function. This allows for the - # argument specification from inspect.getargspec() and similar + # argument specification from inspect.getfullargspec() and similar # functions to be overridden with a prototype for a different # function than what was wrapped. The 'enabled' argument provides a # way to enable/disable the use of the decorator. If the type of @@ -194,6 +195,8 @@ def decorator(wrapper=None, enabled=None, adapter=None): # if 'enabled' is callable it will be called to obtain the value to # be checked. If False, the wrapper will not be called and instead # the original wrapped function will be called directly instead. + # The 'proxy' argument provides a way of passing a custom version of + # the FunctionWrapper class used in decorating the function. if wrapper is not None: # Helper function for creating wrapper of the appropriate @@ -206,16 +209,37 @@ def _build(wrapped, wrapper, enabled=None, adapter=None): if not callable(adapter): ns = {} + + # Check if the signature argument specification has + # annotations. 
If it does, then we need to remember
+                # it but also drop it when attempting to manufacture
+                # a stand-in adapter function. This is necessary since
+                # otherwise it would try to look up any types referenced
+                # in the annotations in the empty namespace we use,
+                # which would fail.
+
+                annotations = {}
+
                 if not isinstance(adapter, string_types):
+                    if len(adapter) == 7:
+                        annotations = adapter[-1]
+                        adapter = adapter[:-1]
+
                     adapter = formatargspec(*adapter)
+
                 exec_('def adapter{}: pass'.format(adapter), ns, ns)
                 adapter = ns['adapter']
 
+                # Override the annotations for the manufactured
+                # adapter function so they match the original
+                # adapter signature argument specification.
+
+                if annotations:
+                    adapter.__annotations__ = annotations
+
             return AdapterWrapper(wrapped=wrapped, wrapper=wrapper,
                     enabled=enabled, adapter=adapter)
 
-        return FunctionWrapper(wrapped=wrapped, wrapper=wrapper,
-                enabled=enabled)
+        return proxy(wrapped=wrapped, wrapper=wrapper, enabled=enabled)
 
     # The wrapper has been provided so return the final decorator.
     # The decorator is itself one of our function wrappers so we
@@ -360,7 +384,7 @@ def _capture(target_wrapped):
                     # This one is a bit strange because binding was actually
                     # performed on the wrapper created by our decorator
                     # factory. We need to apply that binding to the decorator
-                    # wrapper function which which the decorator factory
+                    # wrapper function that the decorator factory
                     # was applied to.
 
                     target_wrapper = wrapper.__get__(None, instance)
@@ -384,7 +408,7 @@ def _capture(target_wrapped):
                     # This one is a bit strange because binding was actually
                     # performed on the wrapper created by our decorator
                     # factory. We need to apply that binding to the decorator
-                    # wrapper function which which the decorator factory
+                    # wrapper function that the decorator factory
                     # was applied to.
 
                     target_wrapper = wrapper.__get__(instance, type(instance))
@@ -408,7 +432,8 @@ def _capture(target_wrapped):
         # decorator again wrapped in a partial using the collected
         # arguments.
 
-        return partial(decorator, enabled=enabled, adapter=adapter)
+        return partial(decorator, enabled=enabled, adapter=adapter,
+                proxy=proxy)
 
 # Decorator for implementing thread synchronization. It can be used as a
 # decorator, in which case the synchronization context is determined by
diff --git a/newrelic/packages/wrapt/importer.py b/newrelic/packages/wrapt/importer.py
index 4665f3865..5c4d4cc66 100644
--- a/newrelic/packages/wrapt/importer.py
+++ b/newrelic/packages/wrapt/importer.py
@@ -10,16 +10,17 @@
 
 if PY2:
     string_types = basestring,
+    find_spec = None
 else:
-    import importlib
     string_types = str,
+    from importlib.util import find_spec
 
 from .decorators import synchronized
 
 # The dictionary registering any post import hooks to be triggered once
 # the target module has been imported. Once a module has been imported
 # and the hooks fired, the list of hooks recorded against the target
-# module will be truncacted but the list left in the dictionary. This
+# module will be truncated but the list is left in the dictionary. This
 # acts as a flag to indicate that the module had already been imported.
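#
# For illustration (not part of this change): a post import hook registered
# through the public wrapt API fires as soon as the target module finishes
# importing, e.g.
#
#   @when_imported('socket')
#   def _on_socket_import(module):
#       ...  # patch the freshly imported module
#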
_post_import_hooks = {}
 
@@ -152,12 +153,29 @@ class _ImportHookChainedLoader:
     def __init__(self, loader):
         self.loader = loader
 
-    def load_module(self, fullname):
+        if hasattr(loader, "load_module"):
+            self.load_module = self._load_module
+        if hasattr(loader, "create_module"):
+            self.create_module = self._create_module
+        if hasattr(loader, "exec_module"):
+            self.exec_module = self._exec_module
+
+    def _load_module(self, fullname):
         module = self.loader.load_module(fullname)
         notify_module_loaded(module)
 
         return module
 
+    # Python 3.4 introduced create_module() and exec_module() instead of
+    # load_module() alone, splitting the import into two steps.
+
+    def _create_module(self, spec):
+        return self.loader.create_module(spec)
+
+    def _exec_module(self, module):
+        self.loader.exec_module(module)
+        notify_module_loaded(module)
+
 
 class ImportHookFinder:
 
     def __init__(self):
@@ -187,7 +205,7 @@ def find_module(self, fullname, path=None):
         # Now call back into the import system again.
 
         try:
-            if PY2:
+            if not find_spec:
                 # For Python 2 we don't have much choice but to
                 # call back in to __import__(). This will
                 # actually cause the module to be imported. If no
@@ -208,14 +226,52 @@ def find_module(self, fullname, path=None):
                 # our own loader which will then in turn call the
                 # real loader to import the module and invoke the
                 # post import hooks.
-                try:
-                    import importlib.util
-                    loader = importlib.util.find_spec(fullname).loader
-                except (ImportError, AttributeError):
-                    loader = importlib.find_loader(fullname, path)
-                if loader:
+
+                loader = getattr(find_spec(fullname), "loader", None)
+
+                if loader and not isinstance(loader, _ImportHookChainedLoader):
                     return _ImportHookChainedLoader(loader)
+
         finally:
             del self.in_progress[fullname]
+
+    def find_spec(self, fullname, path=None, target=None):
+        # Since Python 3.4, you are meant to implement the find_spec()
+        # method instead of find_module(), and since Python 3.10 you get
+        # deprecation warnings if you don't define find_spec().
+
+        # If the module being imported is not one we have registered
+        # post import hooks for, we can return immediately. We will
+        # take no further part in the importing of this module.
+
+        if fullname not in _post_import_hooks:
+            return None
+
+        # When we are interested in a specific module, we will call back
+        # into the import system a second time to defer to the import
+        # finder that is supposed to handle the importing of the module.
+        # We set an in-progress flag for the target module so that on
+        # the second time through we don't trigger another call back
+        # into the import system and cause an infinite loop.
+
+        if fullname in self.in_progress:
+            return None
+
+        self.in_progress[fullname] = True
+
+        # Now call back into the import system again.
+
+        try:
+            # This should only be Python 3 so find_spec() should always
+            # exist, so we don't need to check.
+
+            spec = find_spec(fullname)
+            loader = getattr(spec, "loader", None)
+
+            if loader and not isinstance(loader, _ImportHookChainedLoader):
+                spec.loader = _ImportHookChainedLoader(loader)
+
+            return spec
+        finally:
+            del self.in_progress[fullname]
diff --git a/newrelic/packages/wrapt/wrappers.py b/newrelic/packages/wrapt/wrappers.py
index 18cf5e053..2716cd1da 100644
--- a/newrelic/packages/wrapt/wrappers.py
+++ b/newrelic/packages/wrapt/wrappers.py
@@ -86,6 +86,14 @@ def __init__(self, wrapped):
         except AttributeError:
             pass
 
+        # Python 3.10 onwards also does not allow __annotations__ to be
+        # overridden using a property; it must instead be set explicitly.
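+        # For example (illustrative, not from the original change): for
+        # ``def f(x: int) -> str: ...`` an ObjectProxy around f exposes
+        # f.__annotations__ ({'x': int, 'return': str}) through the
+        # attribute copied below rather than through the property removed
+        # further down.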
+
+        try:
+            object.__setattr__(self, '__annotations__', wrapped.__annotations__)
+        except AttributeError:
+            pass
+
     @property
     def __name__(self):
         return self.__wrapped__.__name__
@@ -102,14 +110,6 @@ def __class__(self):
     def __class__(self, value):
         self.__wrapped__.__class__ = value
 
-    @property
-    def __annotations__(self):
-        return self.__wrapped__.__annotations__
-
-    @__annotations__.setter
-    def __annotations__(self, value):
-        self.__wrapped__.__annotations__ = value
-
     def __dir__(self):
         return dir(self.__wrapped__)
 
@@ -178,11 +178,23 @@ def __setattr__(self, name, value):
             object.__setattr__(self, '__qualname__', value.__qualname__)
         except AttributeError:
             pass
+        try:
+            object.__delattr__(self, '__annotations__')
+        except AttributeError:
+            pass
+        try:
+            object.__setattr__(self, '__annotations__', value.__annotations__)
+        except AttributeError:
+            pass
 
     elif name == '__qualname__':
         setattr(self.__wrapped__, name, value)
         object.__setattr__(self, name, value)
 
+    elif name == '__annotations__':
+        setattr(self.__wrapped__, name, value)
+        object.__setattr__(self, name, value)
+
     elif hasattr(type(self), name):
         object.__setattr__(self, name, value)
 
@@ -550,7 +562,7 @@ def __call__(self, *args, **kwargs):
         # a function that was already bound to an instance. In that case
         # we want to extract the instance from the function and use it.
 
-        if self._self_binding == 'function':
+        if self._self_binding in ('function', 'classmethod'):
             if self._self_instance is None:
                 instance = getattr(self.__wrapped__, '__self__', None)
                 if instance is not None:
@@ -566,6 +578,33 @@ def __call__(self, *args, **kwargs):
         return self._self_wrapper(self.__wrapped__, self._self_instance,
                 args, kwargs)
 
+    def __set_name__(self, owner, name):
+        # This is a special method used to supply information to
+        # descriptors about the name of the variable in a class
+        # definition. We avoid adding this to ObjectProxy as we are not
+        # sure of the broader implications of doing that, so it is
+        # restricted to the FunctionWrapper used by decorators.
+
+        if hasattr(self.__wrapped__, "__set_name__"):
+            self.__wrapped__.__set_name__(owner, name)
+
+    def __instancecheck__(self, instance):
+        # This is a special method used by isinstance() to perform
+        # checks against the `__wrapped__` object.
+        return isinstance(instance, self.__wrapped__)
+
+    def __subclasscheck__(self, subclass):
+        # This is a special method used by issubclass() to perform
+        # checks about class inheritance. We need to unwrap any object
+        # proxy. We avoid adding this to ObjectProxy as we are not
+        # sure of the broader implications of doing that, so it is
+        # restricted to the FunctionWrapper used by decorators.
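+        # Illustrative note (not from the original change): given
+        # ``ProxyC = FunctionWrapper(C, wrapper)`` wrapping a class C,
+        # this makes ``issubclass(D, ProxyC)`` behave like
+        # ``issubclass(D, C)``, unwrapping D as well when D is a proxy.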
+ + if hasattr(subclass, "__wrapped__"): + return issubclass(subclass.__wrapped__, self.__wrapped__) + else: + return issubclass(subclass, self.__wrapped__) + class BoundFunctionWrapper(_FunctionWrapperBase): def __call__(self, *args, **kwargs): diff --git a/newrelic/samplers/memory_usage.py b/newrelic/samplers/memory_usage.py index 15bdbaef1..11b75eef9 100644 --- a/newrelic/samplers/memory_usage.py +++ b/newrelic/samplers/memory_usage.py @@ -21,19 +21,18 @@ from newrelic.common.system_info import physical_memory_used, total_physical_memory from newrelic.samplers.decorators import data_source_generator -PID = os.getpid() - @data_source_generator(name="Memory Usage") def memory_usage_data_source(): memory = physical_memory_used() total_memory = total_physical_memory() + pid = os.getpid() # Calculate memory utilization without 0 division errors memory_utilization = (memory / total_memory) if total_memory != 0 else 0 yield ("Memory/Physical", memory) - yield ("Memory/Physical/%d" % (PID), memory) + yield ("Memory/Physical/%d" % (pid), memory) yield ("Memory/Physical/Utilization", memory_utilization) - yield ("Memory/Physical/Utilization/%d" % (PID), memory_utilization) + yield ("Memory/Physical/Utilization/%d" % (pid), memory_utilization) diff --git a/pyproject.toml b/pyproject.toml index a1b2989af..65be1548b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ include = '\.pyi?$' profile = "black" [tool.pylint.messages_control] -disable = "B101,C0103,C0114,C0115,C0116,C0209,C0302,C0415,E0401,E1120,E122,E126,E127,E128,E203,E501,E722,R0201,R0205,R0401,R0801,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R1705,R1710,R1725,W0201,W0212,W0223,W0402,W0603,W0612,W0613,W0702,W0703,W0706,W504,line-too-long,redefined-outer-name" +disable = "C0103,C0114,C0115,C0116,C0209,C0302,C0415,E0401,E1120,R0205,R0401,R0801,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R1705,R1710,R1725,W0201,W0212,W0223,W0402,W0603,W0612,W0613,W0702,W0703,W0706,line-too-long,redefined-outer-name" [tool.pylint.format] max-line-length = "120" @@ -15,4 +15,4 @@ max-line-length = "120" good-names = "exc,val,tb" [tool.bandit] -skips = ["B110"] +skips = ["B110", "B101", "B404"] diff --git a/setup.py b/setup.py index 9dc1b97c2..cdb4ac091 100644 --- a/setup.py +++ b/setup.py @@ -21,8 +21,8 @@ assert python_version in ((2, 7),) or python_version >= ( 3, - 6, -), "The New Relic Python agent only supports Python 2.7 and 3.6+." + 7, +), "The New Relic Python agent only supports Python 2.7 and 3.7+." 
with_setuptools = False @@ -120,7 +120,6 @@ def build_extension(self, ext): "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -151,18 +150,19 @@ def build_extension(self, ext): zip_safe=False, classifiers=classifiers, packages=packages, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*", + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*", package_data={ "newrelic": ["newrelic.ini", "version.txt", "packages/urllib3/LICENSE.txt", "common/cacert.pem"], }, - scripts=["scripts/newrelic-admin"], - extras_require={"infinite-tracing": ["grpcio", "protobuf<4"]}, + extras_require={"infinite-tracing": ["grpcio", "protobuf"]}, ) if with_setuptools: kwargs["entry_points"] = { "console_scripts": ["newrelic-admin = newrelic.admin:main"], } +else: + kwargs["scripts"] = ["scripts/newrelic-admin"] def with_librt(): diff --git a/tests/adapter_daphne/conftest.py b/tests/adapter_daphne/conftest.py new file mode 100644 index 000000000..cda62f22e --- /dev/null +++ b/tests/adapter_daphne/conftest.py @@ -0,0 +1,37 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +_coverage_source = [ + "newrelic.hooks.adapter_daphne", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (adapter_daphne)", default_settings=_default_settings +) diff --git a/tests/adapter_daphne/test_daphne.py b/tests/adapter_daphne/test_daphne.py new file mode 100644 index 000000000..4953e9a9f --- /dev/null +++ b/tests/adapter_daphne/test_daphne.py @@ -0,0 +1,136 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
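+
+# Note (added for clarity, not part of the original change): these tests boot
+# a real daphne server on a background thread and swap the ASGI application
+# under test into it via the server_and_port fixture defined below.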
+ +import asyncio +import threading +from urllib.request import HTTPError, urlopen + +import daphne.server +import pytest +from testing_support.fixtures import ( + override_application_settings, + raise_background_exceptions, + validate_transaction_errors, + validate_transaction_metrics, + wait_for_background_threads, +) +from testing_support.sample_asgi_applications import ( + AppWithCall, + AppWithCallRaw, + simple_app_v2_raw, +) +from testing_support.util import get_open_port + +from newrelic.common.object_names import callable_name + +DAPHNE_VERSION = tuple(int(v) for v in daphne.__version__.split(".")[:2]) +skip_asgi_3_unsupported = pytest.mark.skipif(DAPHNE_VERSION < (3, 0), reason="ASGI3 unsupported") +skip_asgi_2_unsupported = pytest.mark.skipif(DAPHNE_VERSION >= (3, 0), reason="ASGI2 unsupported") + + +@pytest.fixture( + params=( + pytest.param( + simple_app_v2_raw, + marks=skip_asgi_2_unsupported, + ), + pytest.param( + AppWithCallRaw(), + marks=skip_asgi_3_unsupported, + ), + pytest.param( + AppWithCall(), + marks=skip_asgi_3_unsupported, + ), + ), + ids=("raw", "class_with_call", "class_with_call_double_wrapped"), +) +def app(request, server_and_port): + app = request.param + server, _ = server_and_port + server.application = app + return app + + +@pytest.fixture(scope="session") +def port(server_and_port): + _, port = server_and_port + return port + + +@pytest.fixture(scope="session") +def server_and_port(): + port = get_open_port() + + servers = [] + loops = [] + ready = threading.Event() + + def server_run(): + def on_ready(): + if not ready.is_set(): + loops.append(asyncio.get_event_loop()) + servers.append(server) + ready.set() + + async def fake_app(*args, **kwargs): + raise RuntimeError("Failed to swap out app.") + + server = daphne.server.Server( + fake_app, + endpoints=["tcp:%d:interface=127.0.0.1" % port], + ready_callable=on_ready, + signal_handlers=False, + verbosity=9, + ) + + server.run() + + thread = threading.Thread(target=server_run, daemon=True) + thread.start() + assert ready.wait(timeout=10) + yield servers[0], port + + reactor = daphne.server.reactor + _ = [loop.call_soon_threadsafe(reactor.stop) for loop in loops] # Stop all loops + thread.join(timeout=10) + + if thread.is_alive(): + raise RuntimeError("Thread failed to exit in time.") + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_daphne_200(port, app): + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def response(): + return urlopen("http://localhost:%d" % port, timeout=10) + + assert response().status == 200 + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +@validate_transaction_errors(["builtins:ValueError"]) +def test_daphne_500(port, app): + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + try: + urlopen("http://localhost:%d/exc" % port) + except HTTPError: + pass + + _test() diff --git a/tests/adapter_hypercorn/conftest.py b/tests/adapter_hypercorn/conftest.py new file mode 100644 index 000000000..50e8bad10 --- /dev/null +++ b/tests/adapter_hypercorn/conftest.py @@ -0,0 +1,40 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixture.event_loop import ( # noqa: F401; pylint: disable=W0611 + event_loop as loop, +) +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +_coverage_source = [ + "newrelic.hooks.adapter_hypercorn", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (adapter_hypercorn)", default_settings=_default_settings +) diff --git a/tests/adapter_hypercorn/test_hypercorn.py b/tests/adapter_hypercorn/test_hypercorn.py new file mode 100644 index 000000000..05bf9fdc5 --- /dev/null +++ b/tests/adapter_hypercorn/test_hypercorn.py @@ -0,0 +1,150 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
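+
+# Note (added for clarity, not part of the original change): these tests run
+# a real hypercorn server on a background thread; the WSGI variant below is
+# only exercised on hypercorn versions that support it, per the version gates
+# computed from HYPERCORN_VERSION.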
+ +import asyncio +import threading +import time +from urllib.request import HTTPError, urlopen + +import pkg_resources +import pytest +from testing_support.fixtures import ( + override_application_settings, + raise_background_exceptions, + validate_transaction_errors, + validate_transaction_metrics, + wait_for_background_threads, +) +from testing_support.sample_asgi_applications import ( + AppWithCall, + AppWithCallRaw, + simple_app_v2_raw, +) +from testing_support.util import get_open_port + +from newrelic.api.transaction import ignore_transaction +from newrelic.common.object_names import callable_name + +HYPERCORN_VERSION = tuple(int(v) for v in pkg_resources.get_distribution("hypercorn").version.split(".")) +asgi_2_unsupported = HYPERCORN_VERSION >= (0, 14, 1) +wsgi_unsupported = HYPERCORN_VERSION < (0, 14, 1) + + +def wsgi_app(environ, start_response): + path = environ["PATH_INFO"] + + if path == "/": + start_response("200 OK", response_headers=[]) + elif path == "/ignored": + ignore_transaction() + start_response("200 OK", response_headers=[]) + elif path == "/exc": + raise ValueError("whoopsies") + + return [] + + +@pytest.fixture( + params=( + pytest.param( + simple_app_v2_raw, + marks=pytest.mark.skipif(asgi_2_unsupported, reason="ASGI2 unsupported"), + ), + AppWithCallRaw(), + AppWithCall(), + pytest.param( + wsgi_app, + marks=pytest.mark.skipif(wsgi_unsupported, reason="WSGI unsupported"), + ), + ), + ids=("raw", "class_with_call", "class_with_call_double_wrapped", "wsgi"), +) +def app(request): + return request.param + + +@pytest.fixture() +def port(loop, app): + import hypercorn.asyncio + import hypercorn.config + + port = get_open_port() + shutdown = asyncio.Event() + + def server_run(): + async def shutdown_trigger(): + await shutdown.wait() + return True + + config = hypercorn.config.Config.from_mapping( + { + "bind": ["127.0.0.1:%d" % port], + } + ) + + try: + loop.run_until_complete(hypercorn.asyncio.serve(app, config, shutdown_trigger=shutdown_trigger)) + except Exception: + pass + + thread = threading.Thread(target=server_run, daemon=True) + thread.start() + wait_for_port(port) + yield port + + shutdown.set() + loop.call_soon_threadsafe(loop.stop) + thread.join(timeout=10) + + if thread.is_alive(): + raise RuntimeError("Thread failed to exit in time.") + + +def wait_for_port(port, retries=10): + status = None + for _ in range(retries): + try: + status = urlopen("http://localhost:%d/ignored" % port, timeout=1).status + assert status == 200 + return + except Exception as e: + status = e + + time.sleep(1) + + raise RuntimeError("Failed to wait for port %d. 
Got status %s" % (port, status)) + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_hypercorn_200(port, app): + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def response(): + return urlopen("http://localhost:%d" % port, timeout=10) + + assert response().status == 200 + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_hypercorn_500(port, app): + @validate_transaction_errors(["builtins:ValueError"]) + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + with pytest.raises(HTTPError): + urlopen("http://localhost:%d/exc" % port) + + _test() diff --git a/tests/adapter_uvicorn/test_uvicorn.py b/tests/adapter_uvicorn/test_uvicorn.py index c93e719e8..e3261f4e8 100644 --- a/tests/adapter_uvicorn/test_uvicorn.py +++ b/tests/adapter_uvicorn/test_uvicorn.py @@ -97,7 +97,7 @@ async def on_tick(): thread = threading.Thread(target=server_run, daemon=True) thread.start() - ready.wait() + assert ready.wait(timeout=10) yield port _ = [loop.stop() for loop in loops] # Stop all loops thread.join(timeout=1) diff --git a/tests/agent_features/test_ignore_expected_errors.py b/tests/agent_features/test_ignore_expected_errors.py index 7adc07e1c..d685c39c0 100644 --- a/tests/agent_features/test_ignore_expected_errors.py +++ b/tests/agent_features/test_ignore_expected_errors.py @@ -37,12 +37,8 @@ # Settings presets # Error classes settings -expected_runtime_error_settings = { - "error_collector.expected_classes": [_runtime_error_name] -} -ignore_runtime_error_settings = { - "error_collector.ignore_classes": [_runtime_error_name] -} +expected_runtime_error_settings = {"error_collector.expected_classes": [_runtime_error_name]} +ignore_runtime_error_settings = {"error_collector.ignore_classes": [_runtime_error_name]} # Status code settings expected_status_code_settings = {"error_collector.expected_status_codes": [418]} @@ -141,9 +137,7 @@ def _test(): @pytest.mark.parametrize("settings,expected", error_trace_settings_matrix) @pytest.mark.parametrize("override_expected", override_expected_matrix) -def test_error_trace_attributes_inside_transaction( - settings, expected, override_expected -): +def test_error_trace_attributes_inside_transaction(settings, expected, override_expected): expected = override_expected if override_expected is not None else expected error_trace_attributes = { @@ -165,9 +159,7 @@ def _test(): @pytest.mark.parametrize("settings,expected", error_trace_settings_matrix) @pytest.mark.parametrize("override_expected", override_expected_matrix) -def test_error_trace_attributes_outside_transaction( - settings, expected, override_expected -): +def test_error_trace_attributes_outside_transaction(settings, expected, override_expected): expected = override_expected if override_expected is not None else expected error_trace_attributes = { @@ -182,9 +174,7 @@ def test_error_trace_attributes_outside_transaction( } @reset_core_stats_engine() - @validate_error_trace_attributes_outside_transaction( - _runtime_error_name, exact_attrs=error_trace_attributes - ) + @validate_error_trace_attributes_outside_transaction(_runtime_error_name, exact_attrs=error_trace_attributes) @override_application_settings(settings) def _test(): exercise(override_expected) @@ -206,9 +196,7 @@ def test_error_metrics_inside_transaction(expected): ("ErrorsExpected/all", expected_metrics_count), ] - 
@validate_transaction_metrics(
-        "test", background_task=True, rollup_metrics=metrics_payload
-    )
+    @validate_transaction_metrics("test", background_task=True, rollup_metrics=metrics_payload)
     @background_task(name="test")
     def _test():
         exercise(expected)
@@ -316,7 +304,7 @@ def _test():
         try:
             raise TeapotError(_error_message)
         except:
-            notice_error(status_code=status_code)
+            notice_error(status_code=status_code, application=application_instance(activate=False))
 
     _test()
diff --git a/tests/agent_unittests/test_environment.py b/tests/agent_unittests/test_environment.py
index ef5c5e448..b2c639adc 100644
--- a/tests/agent_unittests/test_environment.py
+++ b/tests/agent_unittests/test_environment.py
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pytest
 import sys
+
+import pytest
+
 from newrelic.core.environment import environment_settings
 
@@ -29,7 +31,7 @@ class Module(object):
 
 def test_plugin_list():
     # Let's pretend we fired an import hook
-    import newrelic.hooks.adapter_gunicorn
+    import newrelic.hooks.adapter_gunicorn  # noqa: F401
 
     environment_info = environment_settings()
 
@@ -41,6 +43,8 @@ def test_plugin_list():
     # Check that bogus plugins don't get reported
     assert "newrelic.hooks.newrelic" not in plugin_list
 
+    # Check that a plugin that should be reported has version info.
+    assert "pytest (%s)" % (pytest.__version__) in plugin_list
 
 class NoIteratorDict(object):
@@ -62,7 +66,7 @@ def __contains__(self, *args, **kwargs):
 
 def test_plugin_list_uses_no_sys_modules_iterator(monkeypatch):
     modules = NoIteratorDict(sys.modules)
-    monkeypatch.setattr(sys, 'modules', modules)
+    monkeypatch.setattr(sys, "modules", modules)
 
     # If environment_settings iterates over sys.modules, an attribute error will be generated
     environment_info = environment_settings()
@@ -113,9 +117,7 @@ def test_plugin_list_uses_no_sys_modules_iterator(monkeypatch):
         ),
     ),
 )
-def test_uvicorn_dispatcher(
-    monkeypatch, loaded_modules, dispatcher, dispatcher_version, worker_version
-):
+def test_uvicorn_dispatcher(monkeypatch, loaded_modules, dispatcher, dispatcher_version, worker_version):
     # Let's pretend we load some modules
     for name, module in loaded_modules.items():
         monkeypatch.setitem(sys.modules, name, module)
diff --git a/tests/agent_unittests/test_harvest_loop.py b/tests/agent_unittests/test_harvest_loop.py
index 7a1f1702d..7760e1307 100644
--- a/tests/agent_unittests/test_harvest_loop.py
+++ b/tests/agent_unittests/test_harvest_loop.py
@@ -896,7 +896,7 @@ def test_default_events_harvested(allowlist_event):
     num_seen = 0 if (allowlist_event != "span_event_data") else 1
     assert app._stats_engine.span_events.num_seen == num_seen
 
-    assert app._stats_engine.metrics_count() == 1
+    assert app._stats_engine.metrics_count() == 4
 
 @failing_endpoint("analytic_event_data")
diff --git a/tests/agent_unittests/test_http_client.py b/tests/agent_unittests/test_http_client.py
index b1fc4b4f4..a5c340d6a 100644
--- a/tests/agent_unittests/test_http_client.py
+++ b/tests/agent_unittests/test_http_client.py
@@ -289,32 +289,47 @@ def test_http_payload_compression(server, client_cls, method, threshold):
         compression_threshold=threshold,
     ) as client:
         with InternalTraceContext(internal_metrics):
-            status, data = client.send_request(payload=payload, params={"method": "test"})
+            status, data = client.send_request(payload=payload, params={"method": "method1"})
+
+    # Sending one additional request to validate metric aggregation for top-level data usage supportability metrics
+    with
client_cls( + "localhost", + server.port, + disable_certificate_validation=True, + compression_method=method, + compression_threshold=threshold, + ) as client: + with InternalTraceContext(internal_metrics): + status, data = client.send_request(payload=payload, params={"method": "method2"}) assert status == 200 data = data.split(b"\n") sent_payload = data[-1] payload_byte_len = len(sent_payload) - internal_metrics = dict(internal_metrics.metrics()) if client_cls is ApplicationModeClient: - assert internal_metrics["Supportability/Python/Collector/Output/Bytes/test"][:2] == [ + assert internal_metrics["Supportability/Python/Collector/method1/Output/Bytes"][:2] == [ 1, - payload_byte_len, + len(payload), + ] + assert internal_metrics["Supportability/Python/Collector/Output/Bytes"][:2] == [ + 2, + len(payload)*2, ] if threshold < 20: # Verify compression time is recorded - assert internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][0] == 1 - assert internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][1] > 0 - - # Verify the original payload length is recorded - assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes/test"][:2] == [1, len(payload)] - - assert len(internal_metrics) == 3 + assert internal_metrics["Supportability/Python/Collector/method1/ZLIB/Compress"][0] == 1 + assert internal_metrics["Supportability/Python/Collector/method1/ZLIB/Compress"][1] > 0 + + # Verify the compressed payload length is recorded + assert internal_metrics["Supportability/Python/Collector/method1/ZLIB/Bytes"][:2] == [1, payload_byte_len] + assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes"][:2] == [2, payload_byte_len*2] + + assert len(internal_metrics) == 8 else: # Verify no ZLIB compression metrics were sent - assert len(internal_metrics) == 1 + assert len(internal_metrics) == 3 else: assert not internal_metrics diff --git a/tests/coroutines_asyncio/test_context_propagation.py b/tests/coroutines_asyncio/test_context_propagation.py index 10d0ecd52..3beef38d0 100644 --- a/tests/coroutines_asyncio/test_context_propagation.py +++ b/tests/coroutines_asyncio/test_context_propagation.py @@ -15,7 +15,6 @@ import sys import pytest -import uvloop from testing_support.fixtures import ( function_not_called, override_generic_settings, @@ -34,6 +33,14 @@ from newrelic.core.config import global_settings from newrelic.core.trace_cache import trace_cache +# uvloop is not available on PyPy. +try: + import uvloop + + loop_policies = (None, uvloop.EventLoopPolicy()) +except ImportError: + loop_policies = (None,) + @function_trace("waiter3") async def child(): @@ -88,7 +95,7 @@ async def _test(asyncio, schedule, nr_enabled=True): return trace -@pytest.mark.parametrize("loop_policy", (None, uvloop.EventLoopPolicy())) +@pytest.mark.parametrize("loop_policy", loop_policies) @pytest.mark.parametrize( "schedule", ( diff --git a/tests/datastore_aioredis/conftest.py b/tests/datastore_aioredis/conftest.py index d144af2df..de9c6c04d 100644 --- a/tests/datastore_aioredis/conftest.py +++ b/tests/datastore_aioredis/conftest.py @@ -13,9 +13,11 @@ # limitations under the License. 
import aioredis -import asyncio import pytest +from testing_support.db_settings import redis_settings + +from testing_support.fixture.event_loop import event_loop as loop from testing_support.fixtures import ( # noqa: F401 code_coverage_fixture, collector_agent_registration_fixture, @@ -23,6 +25,10 @@ ) AIOREDIS_VERSION = tuple(int(x) for x in aioredis.__version__.split(".")[:2]) +SKIPIF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2,), reason="Unsupported aioredis version.") +SKIPIF_AIOREDIS_V2 = pytest.mark.skipif(AIOREDIS_VERSION >= (2,), reason="Unsupported aioredis version.") +DB_SETTINGS = redis_settings()[0] + _coverage_source = [ "newrelic.hooks.datastore_aioredis", @@ -45,10 +51,19 @@ ) -event_loop = asyncio.get_event_loop() -asyncio.set_event_loop(event_loop) - - -@pytest.fixture() -def loop(): - yield event_loop +@pytest.fixture(params=("Redis", "StrictRedis")) +def client(request, loop): + if AIOREDIS_VERSION >= (2, 0): + if request.param == "Redis": + return aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + elif request.param == "StrictRedis": + return aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + else: + raise NotImplementedError() + else: + if request.param == "Redis": + return loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0)) + elif request.param == "StrictRedis": + pytest.skip("StrictRedis not implemented.") + else: + raise NotImplementedError() diff --git a/tests/datastore_aioredis/test_custom_conn_pool.py b/tests/datastore_aioredis/test_custom_conn_pool.py index 155765e5a..7644e8ffb 100644 --- a/tests/datastore_aioredis/test_custom_conn_pool.py +++ b/tests/datastore_aioredis/test_custom_conn_pool.py @@ -17,12 +17,6 @@ will not result in an error. """ -import asyncio -import pytest -import aioredis - -from conftest import event_loop, loop, AIOREDIS_VERSION - from newrelic.api.background_task import background_task # from testing_support.fixture.event_loop import event_loop as loop @@ -43,7 +37,7 @@ async def get_connection(self, name=None, *keys, **options): return self.connection async def release(self, connection): - self.connection.disconnect() + await self.connection.disconnect() async def execute(self, *args, **kwargs): return await self.connection.execute(*args, **kwargs) @@ -105,18 +99,6 @@ async def exercise_redis(client): await client.execute("CLIENT", "LIST") -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), - ] -else: - clients = [ - event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), - ] - - -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_custom_conn_pool:test_fake_conn_pool_enable_instance", @@ -125,7 +107,7 @@ async def exercise_redis(client): background_task=True, ) @background_task() -def test_fake_conn_pool_enable_instance(client, loop): +def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): # Get a real connection conn = getattr(client, "_pool_or_conn", None) if conn is None: @@ -135,14 +117,13 @@ def test_fake_conn_pool_enable_instance(client, loop): # have the `connection_kwargs` attribute. 
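+    # Note: pytest's monkeypatch undoes the attribute swaps below at teardown,
+    # so the client provided by the fixture is restored after the test runs.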
fake_pool = FakeConnectionPool(conn) - client.connection_pool = fake_pool - client._pool_or_conn = fake_pool + monkeypatch.setattr(client, "connection_pool", fake_pool, raising=False) + monkeypatch.setattr(client, "_pool_or_conn", fake_pool, raising=False) assert not hasattr(client.connection_pool, "connection_kwargs") loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_custom_conn_pool:test_fake_conn_pool_disable_instance", @@ -151,15 +132,18 @@ def test_fake_conn_pool_enable_instance(client, loop): background_task=True, ) @background_task() -def test_fake_conn_pool_disable_instance(client, loop): +def test_fake_conn_pool_disable_instance(client, loop, monkeypatch): # Get a real connection - conn = loop.run_until_complete(client.connection_pool.get_connection("GET")) + conn = getattr(client, "_pool_or_conn", None) + if conn is None: + conn = loop.run_until_complete(client.connection_pool.get_connection("GET")) # Replace the original connection pool with one that doesn't # have the `connection_kwargs` attribute. fake_pool = FakeConnectionPool(conn) - client.connection_pool = fake_pool + monkeypatch.setattr(client, "connection_pool", fake_pool, raising=False) + monkeypatch.setattr(client, "_pool_or_conn", fake_pool, raising=False) assert not hasattr(client.connection_pool, "connection_kwargs") loop.run_until_complete(exercise_redis(client)) diff --git a/tests/datastore_aioredis/test_execute_command.py b/tests/datastore_aioredis/test_execute_command.py index 690007d6c..bbc8b2d4f 100644 --- a/tests/datastore_aioredis/test_execute_command.py +++ b/tests/datastore_aioredis/test_execute_command.py @@ -13,11 +13,10 @@ # limitations under the License. 
import pytest -import aioredis from newrelic.api.background_task import background_task from testing_support.fixtures import validate_transaction_metrics, override_application_settings -from conftest import event_loop, loop, AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname @@ -70,19 +69,7 @@ async def exercise_redis_single_arg(client): await client.execute_command("CLIENT LIST") -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), - ] -else: - clients = [ - event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), - ] - - @SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_one_arg_enable", @@ -96,7 +83,6 @@ def test_redis_execute_command_as_one_arg_enable(client, loop): @SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_one_arg_disable", @@ -109,7 +95,6 @@ def test_redis_execute_command_as_one_arg_disable(client, loop): loop.run_until_complete(exercise_redis_single_arg(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_two_args_enable", @@ -122,7 +107,6 @@ def test_redis_execute_command_as_two_args_enable(client, loop): loop.run_until_complete(exercise_redis_multi_args(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_two_args_disable", diff --git a/tests/datastore_aioredis/test_get_and_set.py b/tests/datastore_aioredis/test_get_and_set.py index b363f14d5..a446d5f6c 100644 --- a/tests/datastore_aioredis/test_get_and_set.py +++ b/tests/datastore_aioredis/test_get_and_set.py @@ -12,12 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest -import aioredis - from newrelic.api.background_task import background_task -from conftest import event_loop, loop, AIOREDIS_VERSION from testing_support.fixtures import validate_transaction_metrics, override_application_settings from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname @@ -64,23 +60,11 @@ _disable_rollup_metrics.append((_instance_metric_name, None)) -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), - ] -else: - clients = [ - event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), - ] - - async def exercise_redis(client): await client.set("key", "value") await client.get("key") -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_get_and_set:test_redis_client_operation_enable_instance", @@ -93,7 +77,6 @@ def test_redis_client_operation_enable_instance(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_get_and_set:test_redis_client_operation_disable_instance", diff --git a/tests/datastore_aioredis/test_instance_info.py b/tests/datastore_aioredis/test_instance_info.py index ffb5ab31d..4bb744149 100644 --- a/tests/datastore_aioredis/test_instance_info.py +++ b/tests/datastore_aioredis/test_instance_info.py @@ -17,7 +17,7 @@ import aioredis from newrelic.hooks.datastore_aioredis import _conn_attrs_to_dict, _instance_info -from conftest import event_loop, loop, AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION, SKIPIF_AIOREDIS_V1 _instance_info_tests = [ ({}, ("localhost", "6379", "0")), @@ -29,8 +29,6 @@ ] -SKIP_IF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2, 0), reason="Single arg commands not supported.") - if AIOREDIS_VERSION >= (2, 0): clients = [aioredis.Redis, aioredis.StrictRedis] class DisabledConnection(aioredis.Connection): @@ -48,22 +46,22 @@ class DisabledUnixConnection(aioredis.UnixDomainSocketConnection, DisabledConnec -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("kwargs,expected", _instance_info_tests) -def test_strict_redis_client_instance_info(client, kwargs, expected, loop): - r = client(**kwargs) +def test_strict_redis_client_instance_info(client_cls, kwargs, expected, loop): + r = client_cls(**kwargs) if isawaitable(r): r = loop.run_until_complete(r) conn_kwargs = r.connection_pool.connection_kwargs assert _instance_info(conn_kwargs) == expected -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("kwargs,expected", _instance_info_tests) -def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): - r = client(**kwargs) +def test_strict_redis_connection_instance_info(client_cls, kwargs, expected, loop): + r = client_cls(**kwargs) if isawaitable(r): r = loop.run_until_complete(r) r.connection_pool.connection_class = DisabledConnection @@ -72,7 +70,7 @@ def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): conn_kwargs = _conn_attrs_to_dict(connection) assert _instance_info(conn_kwargs) == expected 
finally: - r.connection_pool.release(connection) + loop.run_until_complete(r.connection_pool.release(connection)) _instance_info_from_url_tests = [ @@ -98,20 +96,20 @@ def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): ] -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) -def test_strict_redis_client_from_url(client, args, kwargs, expected): - r = client.from_url(*args, **kwargs) +def test_strict_redis_client_from_url(client_cls, args, kwargs, expected): + r = client_cls.from_url(*args, **kwargs) conn_kwargs = r.connection_pool.connection_kwargs assert _instance_info(conn_kwargs) == expected -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) -def test_strict_redis_connection_from_url(client, args, kwargs, expected, loop): - r = client.from_url(*args, **kwargs) +def test_strict_redis_connection_from_url(client_cls, args, kwargs, expected, loop): + r = client_cls.from_url(*args, **kwargs) if r.connection_pool.connection_class in (aioredis.Connection, aioredis.connection.SSLConnection): r.connection_pool.connection_class = DisabledConnection elif r.connection_pool.connection_class is aioredis.UnixDomainSocketConnection: @@ -124,4 +122,4 @@ def test_strict_redis_connection_from_url(client, args, kwargs, expected, loop): conn_kwargs = _conn_attrs_to_dict(connection) assert _instance_info(conn_kwargs) == expected finally: - r.connection_pool.release(connection) + loop.run_until_complete(r.connection_pool.release(connection)) diff --git a/tests/datastore_aioredis/test_multiple_dbs.py b/tests/datastore_aioredis/test_multiple_dbs.py index 248fb847d..cb817c9f8 100644 --- a/tests/datastore_aioredis/test_multiple_dbs.py +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -17,7 +17,7 @@ from newrelic.api.background_task import background_task from testing_support.fixtures import validate_transaction_metrics, override_application_settings -from conftest import event_loop, loop, AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname @@ -100,30 +100,38 @@ ] ) - if AIOREDIS_VERSION >= (2, 0): - client_set = [ - ( - aioredis.Redis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), - aioredis.Redis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), - ), - ( - aioredis.StrictRedis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), - ), - ] - else: - client_set = [ - ( - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[0]["host"], DB_SETTINGS[0]["port"]), db=0) - ), - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[1]["host"], DB_SETTINGS[1]["port"]), db=0) - ), - ) - ] -else: - client_set = [] + +@pytest.fixture(params=("Redis", "StrictRedis")) +def client_set(request, loop): + if len(DB_SETTINGS) > 1: + if AIOREDIS_VERSION >= (2, 0): + if request.param == "Redis": + return ( + aioredis.Redis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), + aioredis.Redis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), + ) + 
elif request.param == "StrictRedis": + return ( + aioredis.StrictRedis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), + aioredis.StrictRedis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), + ) + else: + raise NotImplementedError() + else: + if request.param == "Redis": + return ( + loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[0]["host"], DB_SETTINGS[0]["port"]), db=0) + ), + loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[1]["host"], DB_SETTINGS[1]["port"]), db=0) + ), + ) + elif request.param == "StrictRedis": + pytest.skip("StrictRedis not implemented.") + else: + raise NotImplementedError() + async def exercise_redis(client_1, client_2): @@ -137,7 +145,6 @@ async def exercise_redis(client_1, client_2): @pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") -@pytest.mark.parametrize("client_set", client_set) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_multiple_dbs:test_multiple_datastores_enabled", @@ -151,7 +158,6 @@ def test_multiple_datastores_enabled(client_set, loop): @pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") -@pytest.mark.parametrize("client_set", client_set) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_multiple_dbs:test_multiple_datastores_disabled", @@ -165,7 +171,6 @@ def test_multiple_datastores_disabled(client_set, loop): @pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") -@pytest.mark.parametrize("client_set", client_set) @validate_transaction_metrics( "test_multiple_dbs:test_concurrent_calls", scoped_metrics=_concurrent_scoped_metrics, diff --git a/tests/datastore_aioredis/test_span_event.py b/tests/datastore_aioredis/test_span_event.py index 0ab8923ca..1c9227e54 100644 --- a/tests/datastore_aioredis/test_span_event.py +++ b/tests/datastore_aioredis/test_span_event.py @@ -13,13 +13,11 @@ # limitations under the License. 
import pytest -import aioredis from newrelic.api.transaction import current_transaction from newrelic.api.background_task import background_task from testing_support.db_settings import redis_settings -from conftest import event_loop, loop, AIOREDIS_VERSION from testing_support.fixtures import override_application_settings from testing_support.validators.validate_span_events import validate_span_events from testing_support.util import instance_hostname @@ -40,19 +38,6 @@ "span_events.enabled": True, } -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - ] -else: - clients = [ - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) - ), - ] - - async def _exercise_db(client): await client.set("key", "value") await client.get("key") @@ -63,7 +48,6 @@ async def _exercise_db(client): await client.execute("CLIENT", "LIST") -@pytest.mark.parametrize("client", clients) @pytest.mark.parametrize("db_instance_enabled", (True, False)) @pytest.mark.parametrize("instance_enabled", (True, False)) def test_span_events(client, instance_enabled, db_instance_enabled, loop): diff --git a/tests/datastore_aioredis/test_trace_node.py b/tests/datastore_aioredis/test_trace_node.py index fb1ac8545..e4fa1e3ba 100644 --- a/tests/datastore_aioredis/test_trace_node.py +++ b/tests/datastore_aioredis/test_trace_node.py @@ -12,10 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import aioredis -import pytest - -from conftest import event_loop, loop, AIOREDIS_VERSION from testing_support.fixtures import validate_tt_collector_json, override_application_settings from testing_support.util import instance_hostname from testing_support.db_settings import redis_settings @@ -73,18 +69,6 @@ "port_path_or_id": "VALUE NOT USED", } -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - ] -else: - clients = [ - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) - ), - ] - async def exercise_redis(client): await client.set("key", "value") @@ -96,7 +80,6 @@ async def exercise_redis(client): await client.execute("CLIENT", "LIST") -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @background_task() @@ -104,7 +87,6 @@ def test_trace_node_datastore_params_enable_instance(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @background_task() @@ -112,7 +94,6 @@ def test_trace_node_datastore_params_disable_instance(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_instance_only_settings) @validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @background_task() @@ -120,7 +101,6 @@ def 
test_trace_node_datastore_params_instance_only(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_database_only_settings) @validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) @background_task() diff --git a/tests/datastore_aioredis/test_transactions.py b/tests/datastore_aioredis/test_transactions.py new file mode 100644 index 000000000..168de008b --- /dev/null +++ b/tests/datastore_aioredis/test_transactions.py @@ -0,0 +1,134 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from newrelic.api.background_task import background_task +from testing_support.fixtures import validate_transaction_errors + +from conftest import SKIPIF_AIOREDIS_V1, SKIPIF_AIOREDIS_V2, AIOREDIS_VERSION + + +@background_task() +@pytest.mark.parametrize("in_transaction", (True, False)) +def test_pipelines_no_harm(client, in_transaction, loop): + async def exercise(): + if AIOREDIS_VERSION >= (2,): + pipe = client.pipeline(transaction=in_transaction) + else: + pipe = client.pipeline() # Transaction kwarg unsupported + + pipe.set("TXN", 1) + return await pipe.execute() + + status = loop.run_until_complete(exercise()) + assert status == [True] + + +def exercise_transaction_sync(pipe): + pipe.set("TXN", 1) + + +async def exercise_transaction_async(pipe): + await pipe.set("TXN", 1) + + +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("exercise", (exercise_transaction_sync, exercise_transaction_async)) +@background_task() +def test_transactions_no_harm(client, loop, exercise): + status = loop.run_until_complete(client.transaction(exercise)) + assert status == [True] + + +@SKIPIF_AIOREDIS_V2 +@background_task() +def test_multi_exec_no_harm(client, loop): + async def exercise(): + pipe = client.multi_exec() + pipe.set("key", "value") + status = await pipe.execute() + assert status == [True] + + loop.run_until_complete(exercise()) + + +@SKIPIF_AIOREDIS_V1 +@background_task() +def test_pipeline_immediate_execution_no_harm(client, loop): + key = "TXN_WATCH" + async def exercise(): + await client.set(key, 1) + + if AIOREDIS_VERSION >= (2,): + pipe = client.pipeline(transaction=True) + else: + pipe = client.pipeline() # Transaction kwarg unsupported + + async with pipe: + await pipe.watch(key) + value = int(await pipe.get(key)) + assert value == 1 + value += 1 + pipe.multi() + pipe.set(key, value) + await pipe.execute() + + assert int(await client.get(key)) == 2 + + loop.run_until_complete(exercise()) + + +@SKIPIF_AIOREDIS_V1 +@background_task() +def test_transaction_immediate_execution_no_harm(client, loop): + key = "TXN_WATCH" + async def exercise(): + async def exercise_transaction(pipe): + value = int(await pipe.get(key)) + assert value == 1 + value += 1 + pipe.multi() + pipe.set(key, value) + await pipe.execute() + + await client.set(key, 1) + status = await client.transaction(exercise_transaction, key) + assert int(await 
client.get(key)) == 2 + + return status + + status = loop.run_until_complete(exercise()) + assert status == [] + + +@SKIPIF_AIOREDIS_V1 +@validate_transaction_errors([]) +@background_task() +def test_transaction_watch_error_no_harm(client, loop): + key = "TXN_WATCH" + async def exercise(): + async def exercise_transaction(pipe): + value = int(await pipe.get(key)) + if value == 1: + # Only run SET on the first pass, as this callback runs repeatedly until no watch error is raised. + await pipe.set(key, 2) + + await client.set(key, 1) + status = await client.transaction(exercise_transaction, key) + + return status + + status = loop.run_until_complete(exercise()) + assert status == [] diff --git a/tests/datastore_aioredis/test_uninstrumented_methods.py b/tests/datastore_aioredis/test_uninstrumented_methods.py index 43a2fe179..f1b36b1ca 100644 --- a/tests/datastore_aioredis/test_uninstrumented_methods.py +++ b/tests/datastore_aioredis/test_uninstrumented_methods.py @@ -12,27 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest -import aioredis - -from conftest import event_loop, loop, AIOREDIS_VERSION - -from testing_support.db_settings import redis_settings - -DB_SETTINGS = redis_settings()[0] - -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - ] -else: - clients = [ - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) - ), - ] - IGNORED_METHODS = { "address", @@ -83,7 +62,6 @@ } -@pytest.mark.parametrize("client", clients) def test_uninstrumented_methods(client): methods = {m for m in dir(client) if not m[0] == "_"} is_wrapped = lambda m: hasattr(getattr(client, m), "__wrapped__") diff --git a/tests/datastore_mysql/test_database.py b/tests/datastore_mysql/test_database.py index 06bdaba5d..0991d6df0 100644 --- a/tests/datastore_mysql/test_database.py +++ b/tests/datastore_mysql/test_database.py @@ -25,8 +25,14 @@ DB_NAMESPACE = DB_SETTINGS["namespace"] DB_PROCEDURE = "hello_" + DB_NAMESPACE +mysql_version = tuple(int(x) for x in mysql.connector.__version__.split(".")[:3]) +if mysql_version >= (8, 0, 30): + _connector_metric_name = 'Function/mysql.connector.pooling:connect' +else: + _connector_metric_name = 'Function/mysql.connector:connect' + _test_execute_via_cursor_scoped_metrics = [ - ('Function/mysql.connector:connect', 1), + (_connector_metric_name, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), @@ -103,7 +109,7 @@ def test_execute_via_cursor(table_name): connection.commit() _test_connect_using_alias_scoped_metrics = [ - ('Function/mysql.connector:connect', 1), + (_connector_metric_name, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), diff --git a/tests/datastore_pymongo/conftest.py b/tests/datastore_pymongo/conftest.py index 518b19325..8d279f2e2 100644 --- a/tests/datastore_pymongo/conftest.py +++ b/tests/datastore_pymongo/conftest.py @@ -12,26 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the
License. -import pytest - -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) _coverage_source = [ - 'newrelic.hooks.datastore_pymongo', + "newrelic.hooks.datastore_pymongo", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (datastore_pymongo)', - default_settings=_default_settings, - linked_applications=['Python Agent Test (datastore)']) + app_name="Python Agent Test (datastore_pymongo)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) diff --git a/tests/datastore_pymongo/test_pymongo.py b/tests/datastore_pymongo/test_pymongo.py index fa464fa12..09ea62e0b 100644 --- a/tests/datastore_pymongo/test_pymongo.py +++ b/tests/datastore_pymongo/test_pymongo.py @@ -12,17 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pymongo -import pytest import sqlite3 -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors) +import pymongo from testing_support.db_settings import mongodb_settings -from testing_support.validators.validate_database_duration import validate_database_duration +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_database_duration import ( + validate_database_duration, +) from newrelic.api.background_task import background_task - +from newrelic.packages import six DB_SETTINGS = mongodb_settings()[0] MONGODB_HOST = DB_SETTINGS["host"] @@ -30,10 +33,11 @@ MONGODB_COLLECTION = DB_SETTINGS["collection"] -def _exercise_mongo(db): +def _exercise_mongo_v3(db): db[MONGODB_COLLECTION].save({"x": 10}) db[MONGODB_COLLECTION].save({"x": 8}) db[MONGODB_COLLECTION].save({"x": 11}) + db[MONGODB_COLLECTION].find_one() for item in db[MONGODB_COLLECTION].find(): @@ -46,163 +50,243 @@ def _exercise_mongo(db): [item["x"] for item in db[MONGODB_COLLECTION].find().limit(2).skip(1)] - if pymongo.version_tuple >= (3, 0): - db[MONGODB_COLLECTION].initialize_unordered_bulk_op() - db[MONGODB_COLLECTION].initialize_ordered_bulk_op() - db[MONGODB_COLLECTION].bulk_write([pymongo.InsertOne({'x': 1})]) - db[MONGODB_COLLECTION].insert_one({'x': 300}) - db[MONGODB_COLLECTION].insert_many([{'x': 1} for i in range(20, 25)]) - db[MONGODB_COLLECTION].replace_one({'x': 1}, {'x': 2}) - db[MONGODB_COLLECTION].update_one({'x': 1}, {'$inc': {'x': 3}}) - db[MONGODB_COLLECTION].update_many({'x': 1}, {'$inc': {'x': 3}}) - db[MONGODB_COLLECTION].delete_one({'x': 4}) - db[MONGODB_COLLECTION].delete_many({'x': 4}) - db[MONGODB_COLLECTION].find_raw_batches() - db[MONGODB_COLLECTION].parallel_scan(1) - 
db[MONGODB_COLLECTION].create_indexes( - [pymongo.IndexModel([('x', pymongo.DESCENDING)])]) - db[MONGODB_COLLECTION].list_indexes() - db[MONGODB_COLLECTION].aggregate([]) - db[MONGODB_COLLECTION].aggregate_raw_batches([]) - db[MONGODB_COLLECTION].find_one_and_delete({'x': 10}) - db[MONGODB_COLLECTION].find_one_and_replace({'x': 300}, {'x': 301}) - db[MONGODB_COLLECTION].find_one_and_update({'x': 301}, {'$inc': {'x': 300}}) + db[MONGODB_COLLECTION].initialize_unordered_bulk_op() + db[MONGODB_COLLECTION].initialize_ordered_bulk_op() + db[MONGODB_COLLECTION].parallel_scan(1) + + db[MONGODB_COLLECTION].bulk_write([pymongo.InsertOne({"x": 1})]) + db[MONGODB_COLLECTION].insert_one({"x": 300}) + db[MONGODB_COLLECTION].insert_many([{"x": 1} for i in range(20, 25)]) + db[MONGODB_COLLECTION].replace_one({"x": 1}, {"x": 2}) + db[MONGODB_COLLECTION].update_one({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].update_many({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].delete_one({"x": 4}) + db[MONGODB_COLLECTION].delete_many({"x": 4}) + db[MONGODB_COLLECTION].find_raw_batches() + db[MONGODB_COLLECTION].create_indexes([pymongo.IndexModel([("x", pymongo.DESCENDING)])]) + db[MONGODB_COLLECTION].list_indexes() + db[MONGODB_COLLECTION].aggregate([]) + db[MONGODB_COLLECTION].aggregate_raw_batches([]) + db[MONGODB_COLLECTION].find_one_and_delete({"x": 10}) + db[MONGODB_COLLECTION].find_one_and_replace({"x": 300}, {"x": 301}) + db[MONGODB_COLLECTION].find_one_and_update({"x": 301}, {"$inc": {"x": 300}}) + + +def _exercise_mongo_v4(db): + db[MONGODB_COLLECTION].insert_one({"x": 10}) + db[MONGODB_COLLECTION].insert_one({"x": 8}) + db[MONGODB_COLLECTION].insert_one({"x": 11}) + db[MONGODB_COLLECTION].find_one() -# Common Metrics for tests that use _exercise_mongo(). 
+ for item in db[MONGODB_COLLECTION].find(): + item["x"] -_all_count = 9 -if pymongo.version_tuple >= (3, 0): - _all_count += 19 - -_test_pymongo_scoped_metrics = [ - ('Datastore/statement/MongoDB/%s/save' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/create_index' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/find_one' % MONGODB_COLLECTION, 1)] - -_test_pymongo_rollup_metrics = [ - ('Datastore/all', _all_count), - ('Datastore/allOther', _all_count), - ('Datastore/MongoDB/all', _all_count), - ('Datastore/MongoDB/allOther', _all_count), - ('Datastore/operation/MongoDB/save', 3), - ('Datastore/operation/MongoDB/create_index', 1), - ('Datastore/operation/MongoDB/find', 3), - ('Datastore/operation/MongoDB/find_one', 1), - ('Datastore/statement/MongoDB/%s/save' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/create_index' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/find_one' % MONGODB_COLLECTION, 1)] - -if pymongo.version_tuple >= (3, 0): - _test_pymongo_scoped_metrics.extend([ - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_unordered_bulk_op'), 1), - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_ordered_bulk_op'), 1), - ('Datastore/statement/MongoDB/%s/bulk_write' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/replace_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/update_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/parallel_scan' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/create_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/list_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_delete' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_replace' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_update' % MONGODB_COLLECTION, 1), - ]) - _test_pymongo_rollup_metrics.extend([ - ('Datastore/operation/MongoDB/initialize_unordered_bulk_op', 1), - ('Datastore/operation/MongoDB/initialize_ordered_bulk_op', 1), - ('Datastore/operation/MongoDB/bulk_write', 1), - ('Datastore/operation/MongoDB/insert_one', 1), - ('Datastore/operation/MongoDB/insert_many', 1), - ('Datastore/operation/MongoDB/replace_one', 1), - ('Datastore/operation/MongoDB/update_one', 1), - ('Datastore/operation/MongoDB/delete_one', 1), - ('Datastore/operation/MongoDB/delete_many', 1), - ('Datastore/operation/MongoDB/find_raw_batches', 1), - ('Datastore/operation/MongoDB/parallel_scan', 1), - ('Datastore/operation/MongoDB/create_indexes', 1), - ('Datastore/operation/MongoDB/list_indexes', 1), - ('Datastore/operation/MongoDB/aggregate', 1), - ('Datastore/operation/MongoDB/aggregate_raw_batches', 1), - ('Datastore/operation/MongoDB/find_one_and_delete', 1), - 
('Datastore/operation/MongoDB/find_one_and_replace', 1), - ('Datastore/operation/MongoDB/find_one_and_update', 1), - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_unordered_bulk_op'), 1), - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_ordered_bulk_op'), 1), - ('Datastore/statement/MongoDB/%s/bulk_write' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/replace_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/update_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/parallel_scan' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/create_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/list_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_delete' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_replace' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_update' % MONGODB_COLLECTION, 1), - ]) - -# Add Connection metric - -_test_pymongo_connection_scoped_metrics = (_test_pymongo_scoped_metrics + - [('Function/pymongo.connection:Connection.__init__', 1)]) - -_test_pymongo_connection_rollup_metrics = (_test_pymongo_rollup_metrics + - [('Function/pymongo.connection:Connection.__init__', 1)]) - - -@pytest.mark.skipif(pymongo.version_tuple >= (3, 0), - reason='PyMongo version does not have pymongo.Connection.') -@validate_transaction_errors(errors=[]) -@validate_transaction_metrics( - 'test_pymongo:test_mongodb_connection_operation', - scoped_metrics=_test_pymongo_connection_scoped_metrics, - rollup_metrics=_test_pymongo_connection_rollup_metrics, - background_task=True) -@background_task() -def test_mongodb_connection_operation(): - connection = pymongo.Connection(MONGODB_HOST, MONGODB_PORT) - db = connection.test - _exercise_mongo(db) + db[MONGODB_COLLECTION].create_index("x") + for item in db[MONGODB_COLLECTION].find().sort("x", pymongo.ASCENDING): + item["x"] -# Add MongoClient metric + [item["x"] for item in db[MONGODB_COLLECTION].find().limit(2).skip(1)] -_test_pymongo_mongo_client_scoped_metrics = (_test_pymongo_scoped_metrics + - [('Function/pymongo.mongo_client:MongoClient.__init__', 1)]) + db[MONGODB_COLLECTION].bulk_write([pymongo.InsertOne({"x": 1})]) + db[MONGODB_COLLECTION].insert_one({"x": 300}) + db[MONGODB_COLLECTION].insert_many([{"x": 1} for i in range(20, 25)]) + db[MONGODB_COLLECTION].replace_one({"x": 1}, {"x": 2}) + db[MONGODB_COLLECTION].update_one({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].update_many({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].delete_one({"x": 4}) + db[MONGODB_COLLECTION].delete_many({"x": 4}) + db[MONGODB_COLLECTION].find_raw_batches() + db[MONGODB_COLLECTION].create_indexes([pymongo.IndexModel([("x", pymongo.DESCENDING)])]) + db[MONGODB_COLLECTION].list_indexes() + db[MONGODB_COLLECTION].aggregate([]) + db[MONGODB_COLLECTION].aggregate_raw_batches([]) + db[MONGODB_COLLECTION].find_one_and_delete({"x": 
10}) + db[MONGODB_COLLECTION].find_one_and_replace({"x": 300}, {"x": 301}) + db[MONGODB_COLLECTION].find_one_and_update({"x": 301}, {"$inc": {"x": 300}}) -_test_pymongo_mongo_client_rollup_metrics = (_test_pymongo_rollup_metrics + - [('Function/pymongo.mongo_client:MongoClient.__init__', 1)]) +def _exercise_mongo(db): + if pymongo.version_tuple < (4, 0): + _exercise_mongo_v3(db) + else: + _exercise_mongo_v4(db) -@validate_transaction_errors(errors=[]) -@validate_transaction_metrics( - 'test_pymongo:test_mongodb_mongo_client_operation', - scoped_metrics=_test_pymongo_mongo_client_scoped_metrics, - rollup_metrics=_test_pymongo_mongo_client_rollup_metrics, - background_task=True) -@background_task() -def test_mongodb_mongo_client_operation(): - client = pymongo.MongoClient(MONGODB_HOST, MONGODB_PORT) - db = client.test - _exercise_mongo(db) + +# Common Metrics for tests that use _exercise_mongo(). + + +_test_pymongo_scoped_metrics_v3 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/save" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_unordered_bulk_op", 1), + ("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_ordered_bulk_op", 1), + ("Datastore/statement/MongoDB/%s/parallel_scan" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1), +] + + +_test_pymongo_scoped_metrics_v4 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 4), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + 
("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1), +] + +_test_pymongo_rollup_metrics_v3 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/all", 28), + ("Datastore/allOther", 28), + ("Datastore/MongoDB/all", 28), + ("Datastore/MongoDB/allOther", 28), + ("Datastore/operation/MongoDB/create_index", 1), + ("Datastore/operation/MongoDB/find", 3), + ("Datastore/operation/MongoDB/find_one", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/operation/MongoDB/save", 3), + ("Datastore/operation/MongoDB/initialize_unordered_bulk_op", 1), + ("Datastore/operation/MongoDB/initialize_ordered_bulk_op", 1), + ("Datastore/operation/MongoDB/parallel_scan", 1), + ("Datastore/statement/MongoDB/%s/save" % MONGODB_COLLECTION, 3), + (("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_unordered_bulk_op"), 1), + (("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_ordered_bulk_op"), 1), + ("Datastore/statement/MongoDB/%s/parallel_scan" % MONGODB_COLLECTION, 1), + ("Datastore/operation/MongoDB/bulk_write", 1), + ("Datastore/operation/MongoDB/insert_one", 1), + ("Datastore/operation/MongoDB/insert_many", 1), + ("Datastore/operation/MongoDB/replace_one", 1), + ("Datastore/operation/MongoDB/update_one", 1), + ("Datastore/operation/MongoDB/delete_one", 1), + ("Datastore/operation/MongoDB/delete_many", 1), + ("Datastore/operation/MongoDB/find_raw_batches", 1), + ("Datastore/operation/MongoDB/create_indexes", 1), + ("Datastore/operation/MongoDB/list_indexes", 1), + ("Datastore/operation/MongoDB/aggregate", 1), + ("Datastore/operation/MongoDB/aggregate_raw_batches", 1), + ("Datastore/operation/MongoDB/find_one_and_delete", 1), + ("Datastore/operation/MongoDB/find_one_and_replace", 1), + ("Datastore/operation/MongoDB/find_one_and_update", 1), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1), + 
("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1), +] + +_test_pymongo_rollup_metrics_v4 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/all", 25), + ("Datastore/allOther", 25), + ("Datastore/MongoDB/all", 25), + ("Datastore/MongoDB/allOther", 25), + ("Datastore/operation/MongoDB/create_index", 1), + ("Datastore/operation/MongoDB/find", 3), + ("Datastore/operation/MongoDB/find_one", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/operation/MongoDB/bulk_write", 1), + ("Datastore/operation/MongoDB/insert_one", 4), + ("Datastore/operation/MongoDB/insert_many", 1), + ("Datastore/operation/MongoDB/replace_one", 1), + ("Datastore/operation/MongoDB/update_one", 1), + ("Datastore/operation/MongoDB/delete_one", 1), + ("Datastore/operation/MongoDB/delete_many", 1), + ("Datastore/operation/MongoDB/find_raw_batches", 1), + ("Datastore/operation/MongoDB/create_indexes", 1), + ("Datastore/operation/MongoDB/list_indexes", 1), + ("Datastore/operation/MongoDB/aggregate", 1), + ("Datastore/operation/MongoDB/aggregate_raw_batches", 1), + ("Datastore/operation/MongoDB/find_one_and_delete", 1), + ("Datastore/operation/MongoDB/find_one_and_replace", 1), + ("Datastore/operation/MongoDB/find_one_and_update", 1), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 4), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1), +] + + +def test_mongodb_client_operation(): + if pymongo.version_tuple < (4, 0): + _test_pymongo_client_scoped_metrics = _test_pymongo_scoped_metrics_v3 + _test_pymongo_client_rollup_metrics = _test_pymongo_rollup_metrics_v3 + else: + _test_pymongo_client_scoped_metrics = _test_pymongo_scoped_metrics_v4 + _test_pymongo_client_rollup_metrics = _test_pymongo_rollup_metrics_v4 + + txn_name = "test_pymongo:test_mongodb_client_operation.._test" if six.PY3 else "test_pymongo:_test" + + @validate_transaction_errors(errors=[]) + @validate_transaction_metrics( + txn_name, + scoped_metrics=_test_pymongo_client_scoped_metrics, + rollup_metrics=_test_pymongo_client_rollup_metrics, + background_task=True, + ) + @background_task() + def _test(): + client = pymongo.MongoClient(MONGODB_HOST, MONGODB_PORT) + db = client.test + _exercise_mongo(db) + + _test() 
@validate_database_duration() diff --git a/tests/framework_aiohttp/_target_application.py b/tests/framework_aiohttp/_target_application.py index 207c75486..895260798 100644 --- a/tests/framework_aiohttp/_target_application.py +++ b/tests/framework_aiohttp/_target_application.py @@ -174,8 +174,8 @@ def multi_fetch_handler(request): return web.Response(text=responses, content_type='text/html') -def make_app(middlewares=None, loop=None): - app = web.Application(middlewares=middlewares, loop=loop) +def make_app(middlewares=None): + app = web.Application(middlewares=middlewares) app.router.add_route('*', '/coro', index) app.router.add_route('*', '/class', HelloWorldView) app.router.add_route('*', '/error', error) diff --git a/tests/framework_aiohttp/conftest.py b/tests/framework_aiohttp/conftest.py index eccf71a72..b4a31d7e2 100644 --- a/tests/framework_aiohttp/conftest.py +++ b/tests/framework_aiohttp/conftest.py @@ -19,8 +19,10 @@ from _target_application import make_app from aiohttp.test_utils import AioHTTPTestCase from aiohttp.test_utils import TestClient as _TestClient -from testing_support.fixture.event_loop import event_loop -from testing_support.fixtures import ( +from testing_support.fixture.event_loop import ( # noqa: F401 pylint: disable=W0611 + event_loop, +) +from testing_support.fixtures import ( # noqa: F401 pylint: disable=W0611 code_coverage_fixture, collector_agent_registration_fixture, collector_available_fixture, @@ -62,10 +64,17 @@ def __init__(self, server_cls, middleware, *args, **kwargs): def setUp(self): super(SimpleAiohttpApp, self).setUp() + if hasattr(self, "asyncSetUp"): + asyncio.get_event_loop().run_until_complete(self.asyncSetUp()) asyncio.set_event_loop(self.loop) def get_app(self, *args, **kwargs): - return make_app(self.middleware, loop=self.loop) + return make_app(self.middleware) + + def tearDown(self): + super(SimpleAiohttpApp, self).tearDown() + if hasattr(self, "asyncTearDown"): + asyncio.get_event_loop().run_until_complete(self.asyncTearDown()) @asyncio.coroutine def _get_client(self, app_or_server): @@ -79,10 +88,7 @@ def _get_client(self, app_or_server): test_server = self.server_cls(app_or_server, scheme=scheme, host=host, **server_kwargs) client_constructor_arg = test_server - try: - return _TestClient(client_constructor_arg, loop=self.loop) - except TypeError: - return _TestClient(client_constructor_arg) + return _TestClient(client_constructor_arg) get_client = _get_client diff --git a/tests/framework_grpc/_test_common.py b/tests/framework_grpc/_test_common.py index 117d2d2b0..a71bb2f50 100644 --- a/tests/framework_grpc/_test_common.py +++ b/tests/framework_grpc/_test_common.py @@ -12,23 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import grpc -import threading import functools +import threading + from newrelic.api.application import application_instance def create_request(streaming_request, count=1, timesout=False): - from sample_application.sample_application_pb2 import Message + from sample_application import Message def _message_stream(): for i in range(count): - yield Message(text='Hello World', count=count, timesout=timesout) + yield Message(text="Hello World", count=count, timesout=timesout) if streaming_request: request = _message_stream() else: - request = Message(text='Hello World', count=count, timesout=timesout) + request = Message(text="Hello World", count=count, timesout=timesout) return request diff --git a/tests/framework_grpc/conftest.py b/tests/framework_grpc/conftest.py index 1d54103ce..3e27d134d 100644 --- a/tests/framework_grpc/conftest.py +++ b/tests/framework_grpc/conftest.py @@ -13,52 +13,56 @@ # limitations under the License. import gc + import grpc import pytest -import random - -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import ( # noqa + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) from testing_support.mock_external_grpc_server import MockExternalgRPCServer + import newrelic.packages.six as six _coverage_source = [ - 'newrelic.hooks.framework_grpc', + "newrelic.hooks.framework_grpc", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True, + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (framework_grpc)', - default_settings=_default_settings) + app_name="Python Agent Test (framework_grpc)", default_settings=_default_settings +) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def grpc_app_server(): with MockExternalgRPCServer() as server: yield server, server.port -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def mock_grpc_server(grpc_app_server): - from sample_application.sample_application_pb2_grpc import ( - add_SampleApplicationServicer_to_server) - from sample_application import SampleApplicationServicer + from sample_application import ( + SampleApplicationServicer, + add_SampleApplicationServicer_to_server, + ) + server, port = grpc_app_server - add_SampleApplicationServicer_to_server( - SampleApplicationServicer(), server) + add_SampleApplicationServicer_to_server(SampleApplicationServicer(), server) return port -@pytest.fixture(scope='function', autouse=True) +@pytest.fixture(scope="function", autouse=True) def gc_garbage_empty(): yield @@ -72,8 +76,8 @@ def gc_garbage_empty(): pass from grpc._channel import _Rendezvous - rendezvous_stored = sum(1 for o in gc.get_objects() - if hasattr(o, '__class__') and isinstance(o, _Rendezvous)) + + rendezvous_stored = sum(1 for o in gc.get_objects() if hasattr(o, "__class__") and isinstance(o, _Rendezvous)) assert rendezvous_stored == 0 @@ -89,17 +93,14 @@ def stub(stub_and_channel): 
@pytest.fixture(scope="session") def stub_and_channel(mock_grpc_server): port = mock_grpc_server - from sample_application.sample_application_pb2_grpc import ( - SampleApplicationStub) - stub, channel = create_stub_and_channel(port) with channel: yield stub, channel + def create_stub_and_channel(port): - from sample_application.sample_application_pb2_grpc import ( - SampleApplicationStub) + from sample_application import SampleApplicationStub - channel = grpc.insecure_channel('localhost:%s' % port) + channel = grpc.insecure_channel("localhost:%s" % port) stub = SampleApplicationStub(channel) return stub, channel diff --git a/tests/framework_grpc/sample_application/__init__.py b/tests/framework_grpc/sample_application/__init__.py index 32f13b3cd..cd5d3de10 100644 --- a/tests/framework_grpc/sample_application/__init__.py +++ b/tests/framework_grpc/sample_application/__init__.py @@ -14,84 +14,94 @@ import json import time -from newrelic.api.transaction import current_transaction + import grpc +import sample_application_pb2_grpc -from sample_application_pb2 import Message -from sample_application_pb2_grpc import ( - SampleApplicationServicer as _SampleApplicationServicer) +from newrelic.api.transaction import current_transaction + +# This import format is to resolve a bug within protobuf 4 +# Issues for reference: +# https://github.com/protocolbuffers/protobuf/issues/10075 +# https://github.com/protocolbuffers/protobuf/issues/10151 +# Within sample_application_pb2.py, the protobuf import can only +# be done once before the DESCRIPTOR value is set to None +# (in subsequent imports) instead of overriding/ignoring the imports. +# This ensures that the imports happen once. +Message = sample_application_pb2_grpc.sample__application__pb2.Message +add_SampleApplicationServicer_to_server = sample_application_pb2_grpc.add_SampleApplicationServicer_to_server +SampleApplicationStub = sample_application_pb2_grpc.SampleApplicationStub class Status(object): code = grpc.StatusCode.ABORTED - details = 'abort_with_status' + details = "abort_with_status" trailing_metadata = {} -class SampleApplicationServicer(_SampleApplicationServicer): - +class SampleApplicationServicer(sample_application_pb2_grpc.SampleApplicationServicer): def DoUnaryUnary(self, request, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) if request.timesout: while context.is_active(): time.sleep(0.1) - return Message(text='unary_unary: %s' % request.text) + return Message(text="unary_unary: %s" % request.text) def DoUnaryStream(self, request, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) if request.timesout: while context.is_active(): time.sleep(0.1) for i in range(request.count): - yield Message(text='unary_stream: %s' % request.text) + yield Message(text="unary_stream: %s" % request.text) def DoStreamUnary(self, request_iter, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) for request in request_iter: if request.timesout: while context.is_active(): time.sleep(0.1) - return Message(text='stream_unary: %s' % request.text) + return Message(text="stream_unary: %s" % request.text) def DoStreamStream(self, request_iter, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) for 
request in request_iter: if request.timesout: while context.is_active(): time.sleep(0.1) - yield Message(text='stream_stream: %s' % request.text) + yield Message(text="stream_stream: %s" % request.text) def DoUnaryUnaryRaises(self, request, context): - raise AssertionError('unary_unary: %s' % request.text) + raise AssertionError("unary_unary: %s" % request.text) def DoUnaryStreamRaises(self, request, context): - raise AssertionError('unary_stream: %s' % request.text) + raise AssertionError("unary_stream: %s" % request.text) def DoStreamUnaryRaises(self, request_iter, context): for request in request_iter: - raise AssertionError('stream_unary: %s' % request.text) + raise AssertionError("stream_unary: %s" % request.text) def DoStreamStreamRaises(self, request_iter, context): for request in request_iter: - raise AssertionError('stream_stream: %s' % request.text) + raise AssertionError("stream_stream: %s" % request.text) def NoTxnUnaryUnaryRaises(self, request, context): current_transaction().ignore_transaction = True - raise AssertionError('unary_unary: %s' % request.text) + raise AssertionError("unary_unary: %s" % request.text) def NoTxnUnaryStreamRaises(self, request, context): current_transaction().ignore_transaction = True - raise AssertionError('unary_stream: %s' % request.text) + raise AssertionError("unary_stream: %s" % request.text) def NoTxnStreamUnaryRaises(self, request_iter, context): current_transaction().ignore_transaction = True for request in request_iter: - raise AssertionError('stream_unary: %s' % request.text) + raise AssertionError("stream_unary: %s" % request.text) def NoTxnStreamStreamRaises(self, request_iter, context): current_transaction().ignore_transaction = True for request in request_iter: - raise AssertionError('stream_stream: %s' % request.text) + raise AssertionError("stream_stream: %s" % request.text) def NoTxnUnaryUnary(self, request, context): current_transaction().ignore_transaction = True @@ -110,16 +120,16 @@ def NoTxnStreamStream(self, request_iter, context): return self.DoStreamStream(request_iter, context) def DoUnaryUnaryAbort(self, request, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoUnaryStreamAbort(self, request, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoStreamUnaryAbort(self, request_iter, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoStreamStreamAbort(self, request_iter, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoUnaryUnaryAbortWithStatus(self, request, context): context.abort_with_status(Status) diff --git a/tests/framework_sanic/_target_application.py b/tests/framework_sanic/_target_application.py index 03f3e4771..001ff9b23 100644 --- a/tests/framework_sanic/_target_application.py +++ b/tests/framework_sanic/_target_application.py @@ -15,11 +15,18 @@ from sanic import Blueprint, Sanic from sanic.exceptions import NotFound, SanicException, ServerError from sanic.handlers import ErrorHandler -from sanic.response import json, stream +from sanic.response import json from sanic.router import Router from sanic.views import HTTPMethodView +try: + # Old style response streaming + from sanic.response import stream +except ImportError: + stream = None + + class MethodView(HTTPMethodView): async def get(self, request): return json({"hello": 
"world"}) @@ -93,7 +100,7 @@ def get(self, *args): error_handler = CustomErrorHandler() router = CustomRouter() -app = Sanic(name="test app", error_handler=error_handler, router=router) +app = Sanic(name="test-app", error_handler=error_handler, router=router) router.app = app blueprint = Blueprint("test_bp") @@ -139,13 +146,25 @@ async def blueprint_middleware(request): app.register_middleware(request_middleware) +async def do_streaming(request): + if stream is not None: + # Old style response streaming + async def streaming_fn(response): + response.write("foo") + response.write("bar") + + return stream(streaming_fn) + else: + # New style response streaming + response = await request.respond(content_type="text/plain") + await response.send("foo") + await response.send("bar") + await response.eof() + + @app.route("/streaming") async def streaming(request): - async def streaming_fn(response): - response.write("foo") - response.write("bar") - - return stream(streaming_fn) + return await do_streaming(request) # Fake websocket endpoint to enable websockets on the server @@ -200,17 +219,11 @@ async def async_error(request): @blueprint.route("/blueprint") async def blueprint_route(request): - async def streaming_fn(response): - response.write("foo") - - return stream(streaming_fn) - + return await do_streaming(request) app.blueprint(blueprint) app.add_route(MethodView.as_view(), "/method_view") -if not getattr(router, "finalized", True): - router.finalize() if __name__ == "__main__": app.run(host="127.0.0.1", port=8000) diff --git a/tests/framework_sanic/conftest.py b/tests/framework_sanic/conftest.py index f8b5dac37..434528bac 100644 --- a/tests/framework_sanic/conftest.py +++ b/tests/framework_sanic/conftest.py @@ -15,13 +15,15 @@ import asyncio import pytest -from testing_support.fixtures import ( +from testing_support.fixtures import ( # noqa: F401 pylint: disable=W0611 code_coverage_fixture, collector_agent_registration_fixture, collector_available_fixture, ) -from newrelic.common.object_wrapper import transient_function_wrapper +from newrelic.common.object_wrapper import ( # noqa: F401 pylint: disable=W0611 + transient_function_wrapper, +) _coverage_source = [ "newrelic.hooks.framework_sanic", @@ -74,16 +76,19 @@ def create_request_class(app, method, url, headers=None, loop=None): from sanic.server import HttpProtocol class MockProtocol(HttpProtocol): - async def send(*args, **kwargs): + async def send(*args, **kwargs): # pylint: disable=E0211 return proto = MockProtocol(loop=loop, app=app) proto.recv_buffer = bytearray() http = Http(proto) + + if hasattr(http, "init_for_request"): + http.init_for_request() + http.stage = Stage.HANDLER http.response_func = http.http1_response_header _request.stream = http - pass except ImportError: pass @@ -123,6 +128,21 @@ def request(app, method, url, headers=None): if loop is None: loop = asyncio.new_event_loop() + if not getattr(app.router, "finalized", True): + # Handle startup if the router hasn't been finalized. + # Older versions don't have this requirement or variable so + # the default should be True. + if hasattr(app, "_startup"): + loop.run_until_complete(app._startup()) + else: + app.router.finalize() + # Starting in 22.9.0 sanic introduced an API to control middleware ordering. + # This included a new method called finalize_middleware that sets the middleware + # to be used on the request.route during the app._startup. In order to register + # new middleware the finalize_middleware must be called. 
+ elif hasattr(app, "finalize_middleware"): + app.finalize_middleware() + coro = create_request_coroutine(app, method, url, headers, loop) loop.run_until_complete(coro) return RESPONSES.pop() diff --git a/tests/framework_sanic/test_application.py b/tests/framework_sanic/test_application.py index ac2726bd9..eebbde003 100644 --- a/tests/framework_sanic/test_application.py +++ b/tests/framework_sanic/test_application.py @@ -12,142 +12,147 @@ # See the License for the specific language governing permissions and # limitations under the License. +from collections import deque + import pytest import sanic - -from newrelic.core.config import global_settings -from collections import deque +from testing_support.fixtures import ( + function_not_called, + override_application_settings, + override_generic_settings, + override_ignore_status_codes, + validate_transaction_errors, + validate_transaction_event_attributes, + validate_transaction_metrics, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) from newrelic.api.application import application_instance -from newrelic.api.transaction import Transaction from newrelic.api.external_trace import ExternalTrace - -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings, validate_transaction_errors, - validate_transaction_event_attributes, - override_ignore_status_codes, override_generic_settings, - function_not_called) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics - - -sanic_21 = int(sanic.__version__.split('.', 1)[0]) >= 21 - +from newrelic.api.transaction import Transaction +from newrelic.core.config import global_settings BASE_METRICS = [ - ('Function/_target_application:index', 1), - ('Function/_target_application:request_middleware', 1 if int(sanic.__version__.split('.', 1)[0]) > 18 else 2), + ("Function/_target_application:index", 1), + ("Function/_target_application:request_middleware", 1 if int(sanic.__version__.split(".", 1)[0]) > 18 else 2), ] FRAMEWORK_METRICS = [ - ('Python/Framework/Sanic/%s' % sanic.__version__, 1), + ("Python/Framework/Sanic/%s" % sanic.__version__, 1), ] -BASE_ATTRS = ['response.status', 'response.headers.contentType', - 'response.headers.contentLength'] +BASE_ATTRS = ["response.status", "response.headers.contentType", "response.headers.contentLength"] validate_base_transaction_event_attr = validate_transaction_event_attributes( - required_params={'agent': BASE_ATTRS, 'user': [], 'intrinsic': []}, + required_params={"agent": BASE_ATTRS, "user": [], "intrinsic": []}, ) @validate_code_level_metrics("_target_application", "index") @validate_transaction_metrics( - '_target_application:index', + "_target_application:index", scoped_metrics=BASE_METRICS, rollup_metrics=BASE_METRICS + FRAMEWORK_METRICS, ) @validate_base_transaction_event_attr def test_simple_request(app): - response = app.fetch('get', '/') + response = app.fetch("get", "/") assert response.status == 200 -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") def test_websocket(app): - headers = {'upgrade': 'WebSocket'} - response = app.fetch('get', '/', headers=headers) + headers = {"upgrade": "WebSocket"} + response = app.fetch("get", "/", headers=headers) assert response.status == 200 -@pytest.mark.parametrize('method', ( - 'get', - 'post', - 'put', - 'patch', - 'delete', -)) 
+@pytest.mark.parametrize( + "method", + ( + "get", + "post", + "put", + "patch", + "delete", + ), +) def test_method_view(app, method): - metric_name = 'Function/_target_application:MethodView.' + method + metric_name = "Function/_target_application:MethodView." + method @validate_code_level_metrics("_target_application.MethodView", method) @validate_transaction_metrics( - '_target_application:MethodView.' + method, + "_target_application:MethodView." + method, scoped_metrics=[(metric_name, 1)], rollup_metrics=[(metric_name, 1)], ) @validate_base_transaction_event_attr def _test(): - response = app.fetch(method, '/method_view') + response = app.fetch(method, "/method_view") assert response.status == 200 _test() DT_METRICS = [ - ('Supportability/DistributedTrace/AcceptPayload/Success', None), - ('Supportability/TraceContext/TraceParent/Accept/Success', 1), + ("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/TraceParent/Accept/Success", 1), ] @validate_transaction_metrics( - '_target_application:index', + "_target_application:index", scoped_metrics=BASE_METRICS, rollup_metrics=BASE_METRICS + DT_METRICS + FRAMEWORK_METRICS, ) @validate_base_transaction_event_attr -@override_application_settings({ - 'distributed_tracing.enabled': True, -}) +@override_application_settings( + { + "distributed_tracing.enabled": True, + } +) def test_inbound_distributed_trace(app): transaction = Transaction(application_instance()) dt_headers = ExternalTrace.generate_request_headers(transaction) - response = app.fetch('get', '/', headers=dict(dt_headers)) + response = app.fetch("get", "/", headers=dict(dt_headers)) assert response.status == 200 -_params = ["error"] -if not sanic_21: - _params.append('write_response_error') -@pytest.mark.parametrize('endpoint', _params) -def test_recorded_error(app, endpoint): + +@pytest.mark.parametrize("endpoint", ["error", "write_response_error"]) +def test_recorded_error(app, endpoint, sanic_version): + if sanic_version >= (21, 0, 0) and endpoint == "write_response_error": + pytest.skip() + ERROR_METRICS = [ - ('Function/_target_application:%s' % endpoint, 1), + ("Function/_target_application:%s" % endpoint, 1), ] - @validate_transaction_errors(errors=['builtins:ValueError']) + @validate_transaction_errors(errors=["builtins:ValueError"]) @validate_base_transaction_event_attr @validate_transaction_metrics( - '_target_application:%s' % endpoint, + "_target_application:%s" % endpoint, scoped_metrics=ERROR_METRICS, rollup_metrics=ERROR_METRICS + FRAMEWORK_METRICS, ) def _test(): - if endpoint == 'write_response_error': + if endpoint == "write_response_error": with pytest.raises(ValueError): - response = app.fetch('get', '/' + endpoint) + response = app.fetch("get", "/" + endpoint) else: - response = app.fetch('get', '/' + endpoint) + response = app.fetch("get", "/" + endpoint) assert response.status == 500 _test() NOT_FOUND_METRICS = [ - ('Function/_target_application:not_found', 1), + ("Function/_target_application:not_found", 1), ] @validate_transaction_metrics( - '_target_application:not_found', + "_target_application:not_found", scoped_metrics=NOT_FOUND_METRICS, rollup_metrics=NOT_FOUND_METRICS + FRAMEWORK_METRICS, ) @@ -155,88 +160,90 @@ def _test(): @override_ignore_status_codes([404]) @validate_transaction_errors(errors=[]) def test_ignored_by_status_error(app): - response = app.fetch('get', '/404') + response = app.fetch("get", "/404") assert response.status == 404 DOUBLE_ERROR_METRICS = [ - 
('Function/_target_application:zero_division_error', 1), + ("Function/_target_application:zero_division_error", 1), ] @validate_transaction_metrics( - '_target_application:zero_division_error', + "_target_application:zero_division_error", scoped_metrics=DOUBLE_ERROR_METRICS, rollup_metrics=DOUBLE_ERROR_METRICS, ) -@validate_transaction_errors( - errors=['builtins:ValueError', 'builtins:ZeroDivisionError']) +@validate_transaction_errors(errors=["builtins:ValueError", "builtins:ZeroDivisionError"]) def test_error_raised_in_error_handler(app): # Because of a bug in Sanic versions <0.8.0, the response.status value is # inconsistent. Rather than assert the status value, we rely on the # transaction errors validator to confirm the application acted as we'd # expect it to. - app.fetch('get', '/zero') + app.fetch("get", "/zero") -STREAMING_ATTRS = ['response.status', 'response.headers.contentType'] +STREAMING_ATTRS = ["response.status", "response.headers.contentType"] STREAMING_METRICS = [ - ('Function/_target_application:streaming', 1), + ("Function/_target_application:streaming", 1), ] @validate_code_level_metrics("_target_application", "streaming") @validate_transaction_metrics( - '_target_application:streaming', + "_target_application:streaming", scoped_metrics=STREAMING_METRICS, rollup_metrics=STREAMING_METRICS, ) @validate_transaction_event_attributes( - required_params={'agent': STREAMING_ATTRS, 'user': [], 'intrinsic': []}, + required_params={"agent": STREAMING_ATTRS, "user": [], "intrinsic": []}, ) def test_streaming_response(app): # streaming responses do not have content-length headers - response = app.fetch('get', '/streaming') + response = app.fetch("get", "/streaming") assert response.status == 200 ERROR_IN_ERROR_TESTS = [ - ('/sync-error', '_target_application:sync_error', - [('Function/_target_application:sync_error', 1), - ('Function/_target_application:handle_custom_exception_sync', 1)], - ['_target_application:CustomExceptionSync', - 'sanic.exceptions:SanicException']), - - ('/async-error', '_target_application:async_error', - [('Function/_target_application:async_error', 1), - ('Function/_target_application:handle_custom_exception_async', 1)], - ['_target_application:CustomExceptionAsync']), + ( + "/sync-error", + "_target_application:sync_error", + [ + ("Function/_target_application:sync_error", 1), + ("Function/_target_application:handle_custom_exception_sync", 1), + ], + ["_target_application:CustomExceptionSync", "sanic.exceptions:SanicException"], + ), + ( + "/async-error", + "_target_application:async_error", + [ + ("Function/_target_application:async_error", 1), + ("Function/_target_application:handle_custom_exception_async", 1), + ], + ["_target_application:CustomExceptionAsync"], + ), ] -@pytest.mark.parametrize('url,metric_name,metrics,errors', - ERROR_IN_ERROR_TESTS) -@pytest.mark.parametrize('nr_enabled', (True, False)) -def test_errors_in_error_handlers( - nr_enabled, app, url, metric_name, metrics, errors): +@pytest.mark.parametrize("url,metric_name,metrics,errors", ERROR_IN_ERROR_TESTS) +@pytest.mark.parametrize("nr_enabled", (True, False)) +def test_errors_in_error_handlers(nr_enabled, app, url, metric_name, metrics, errors): settings = global_settings() - @override_generic_settings(settings, {'enabled': nr_enabled}) + @override_generic_settings(settings, {"enabled": nr_enabled}) def _test(): # Because of a bug in Sanic versions <0.8.0, the response.status value # is inconsistent. 
Rather than assert the status value, we rely on the # transaction errors validator to confirm the application acted as we'd # expect it to. - app.fetch('get', url) + app.fetch("get", url) if nr_enabled: _test = validate_transaction_errors(errors=errors)(_test) - _test = validate_transaction_metrics(metric_name, - scoped_metrics=metrics, - rollup_metrics=metrics)(_test) + _test = validate_transaction_metrics(metric_name, scoped_metrics=metrics, rollup_metrics=metrics)(_test) else: - _test = function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction')(_test) + _test = function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction")(_test) _test() @@ -244,63 +251,82 @@ def _test(): def test_no_transaction_when_nr_disabled(app): settings = global_settings() - @function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') - @override_generic_settings(settings, {'enabled': False}) + @function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") + @override_generic_settings(settings, {"enabled": False}) def _test(): - app.fetch('GET', '/') + app.fetch("GET", "/") _test() async def async_returning_middleware(*args, **kwargs): from sanic.response import json - return json({'oops': 'I returned it again'}) + + return json({"oops": "I returned it again"}) def sync_returning_middleware(*args, **kwargs): from sanic.response import json - return json({'oops': 'I returned it again'}) + + return json({"oops": "I returned it again"}) def sync_failing_middleware(*args, **kwargs): from sanic.exceptions import SanicException - raise SanicException('Everything is ok', status_code=200) - - -@pytest.mark.parametrize('middleware,attach_to,metric_name,transaction_name', [ - (async_returning_middleware, 'request', - 'test_application:async_returning_middleware', - 'test_application:async_returning_middleware'), - (sync_returning_middleware, 'request', - 'test_application:sync_returning_middleware', - 'test_application:sync_returning_middleware'), - (sync_failing_middleware, 'request', - 'test_application:sync_failing_middleware', - 'test_application:sync_failing_middleware'), - (async_returning_middleware, 'response', - 'test_application:async_returning_middleware', - '_target_application:index'), - (sync_returning_middleware, 'response', - 'test_application:sync_returning_middleware', - '_target_application:index'), -]) -def test_returning_middleware(app, middleware, attach_to, metric_name, - transaction_name): + + raise SanicException("Everything is ok", status_code=200) + + +@pytest.mark.parametrize( + "middleware,attach_to,metric_name,transaction_name", + [ + ( + async_returning_middleware, + "request", + "test_application:async_returning_middleware", + "test_application:async_returning_middleware", + ), + ( + sync_returning_middleware, + "request", + "test_application:sync_returning_middleware", + "test_application:sync_returning_middleware", + ), + ( + sync_failing_middleware, + "request", + "test_application:sync_failing_middleware", + "test_application:sync_failing_middleware", + ), + ( + async_returning_middleware, + "response", + "test_application:async_returning_middleware", + "_target_application:index", + ), + ( + sync_returning_middleware, + "response", + "test_application:sync_returning_middleware", + "_target_application:index", + ), + ], +) +def test_returning_middleware(app, middleware, attach_to, metric_name, transaction_name): metrics = [ - ('Function/%s' % metric_name, 1), + ("Function/%s" % metric_name, 
1), ] @validate_code_level_metrics(*metric_name.split(":")) @validate_transaction_metrics( - transaction_name, - scoped_metrics=metrics, - rollup_metrics=metrics, + transaction_name, + scoped_metrics=metrics, + rollup_metrics=metrics, ) @validate_base_transaction_event_attr def _test(): - response = app.fetch('get', '/') + response = app.fetch("get", "/") assert response.status == 200 original_request_middleware = deque(app.app.request_middleware) @@ -319,17 +345,17 @@ def error_middleware(*args, **kwargs): def test_errors_in_middleware(app): - metrics = [('Function/test_application:error_middleware', 1)] + metrics = [("Function/test_application:error_middleware", 1)] @validate_transaction_metrics( - 'test_application:error_middleware', - scoped_metrics=metrics, - rollup_metrics=metrics, + "test_application:error_middleware", + scoped_metrics=metrics, + rollup_metrics=metrics, ) @validate_base_transaction_event_attr - @validate_transaction_errors(errors=['builtins:ValueError']) + @validate_transaction_errors(errors=["builtins:ValueError"]) def _test(): - response = app.fetch('get', '/') + response = app.fetch("get", "/") assert response.status == 500 original_request_middleware = deque(app.app.request_middleware) @@ -358,31 +384,38 @@ def _test(): ) @validate_transaction_errors(errors=[]) def test_blueprint_middleware(app): - response = app.fetch('get', '/blueprint') + response = app.fetch("get", "/blueprint") assert response.status == 200 -def test_unknown_route(app): - import sanic - sanic_version = [int(x) for x in sanic.__version__.split(".")] - _tx_name = "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" - +def test_unknown_route(app, sanic_version): + _tx_name = ( + "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" + ) + @validate_transaction_metrics(_tx_name) def _test(): - response = app.fetch('get', '/what-route') + response = app.fetch("get", "/what-route") assert response.status == 404 - - _test() -def test_bad_method(app): - import sanic - sanic_version = [int(x) for x in sanic.__version__.split(".")] - _tx_name = "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" + _test() + + +def test_bad_method(app, sanic_version): + _tx_name = ( + "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" + ) @validate_transaction_metrics(_tx_name) @override_ignore_status_codes([405]) @validate_transaction_errors(errors=[]) def _test(): - response = app.fetch('post', '/') + response = app.fetch("post", "/") assert response.status == 405 + _test() + + +@pytest.fixture +def sanic_version(): + return tuple(int(v) for v in sanic.__version__.split(".")) diff --git a/tests/framework_starlette/test_bg_tasks.py b/tests/framework_starlette/test_bg_tasks.py index af929895f..308f67d10 100644 --- a/tests/framework_starlette/test_bg_tasks.py +++ b/tests/framework_starlette/test_bg_tasks.py @@ -1,4 +1,4 @@ - # Copyright 2010 New Relic, Inc. +# Copyright 2010 New Relic, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,26 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest import sys + +import pytest +from starlette import __version__ from testing_support.fixtures import validate_transaction_metrics from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) -from starlette import __version__ starlette_version = tuple(int(x) for x in __version__.split(".")) try: - from starlette.middleware import Middleware # Ignore Flake8 Error + from starlette.middleware import Middleware # noqa: F401 no_middleware = False except ImportError: no_middleware = True -skip_if_no_middleware = pytest.mark.skipif( - no_middleware, reason="These tests verify middleware functionality" -) +skip_if_no_middleware = pytest.mark.skipif(no_middleware, reason="These tests verify middleware functionality") @pytest.fixture(scope="session") @@ -45,12 +44,8 @@ def target_application(): def test_simple(target_application, route): route_metrics = [("Function/_test_bg_tasks:run_%s_bg_task" % route, 1)] - @validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics - ) - @validate_transaction_metrics( - "_test_bg_tasks:%s_bg_task" % route, background_task=True - ) + @validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics) + @validate_transaction_metrics("_test_bg_tasks:%s_bg_task" % route, background_task=True) @validate_transaction_count(2) def _test(): app = target_application["none"] @@ -65,12 +60,8 @@ def _test(): def test_asgi_style_middleware(target_application, route): route_metrics = [("Function/_test_bg_tasks:run_%s_bg_task" % route, 1)] - @validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics - ) - @validate_transaction_metrics( - "_test_bg_tasks:%s_bg_task" % route, background_task=True - ) + @validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics) + @validate_transaction_metrics("_test_bg_tasks:%s_bg_task" % route, background_task=True) @validate_transaction_count(2) def _test(): app = target_application["asgi"] @@ -83,34 +74,42 @@ def _test(): @skip_if_no_middleware @pytest.mark.parametrize("route", ["async", "sync"]) def test_basehttp_style_middleware(target_application, route): - route_metrics = [("Function/_test_bg_tasks:run_%s_bg_task" % route, 1)] - old_metrics = [ - ("Function/_test_bg_tasks:%s_bg_task" % route, 1), - ("Function/_test_bg_tasks:run_%s_bg_task" % route, 1), - ] + route_metric = ("Function/_test_bg_tasks:run_%s_bg_task" % route, 1) + # A function trace metric that appears only when the bug below is present, causing background tasks to be + # completed inside web transactions, requiring a function trace to be used for timing + # instead of a background task transaction. Should not be present at all when bug is fixed. 
+ bg_task_metric = ("Function/_test_bg_tasks:%s_bg_task" % route, 1) def _test(): app = target_application["basehttp"] response = app.get("/" + route) assert response.status == 200 - if starlette_version >= (0, 20, 1): - if sys.version_info[:2] > (3, 7): - _test = validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics - )(_test) - _test = validate_transaction_metrics( - "_test_bg_tasks:%s_bg_task" % route, background_task=True - )(_test) - _test = validate_transaction_count(2)(_test) - else: # Python <= 3.7 requires this specific configuration with starlette 0.20.1 - _test = validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=route_metrics - )(_test) - _test = validate_transaction_count(1)(_test) + BUG_COMPLETELY_FIXED = (starlette_version >= (0, 21, 0)) or ( + starlette_version >= (0, 20, 1) and sys.version_info[:2] > (3, 7) + ) + BUG_PARTIALLY_FIXED = (0, 20, 1) <= starlette_version < (0, 21, 0) and sys.version_info[:2] <= (3, 7) + + if BUG_COMPLETELY_FIXED: + # Assert both web transaction and background task transactions are present. + _test = validate_transaction_metrics( + "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=[route_metric] + )(_test) + _test = validate_transaction_metrics("_test_bg_tasks:%s_bg_task" % route, background_task=True)(_test) + _test = validate_transaction_count(2)(_test) + elif BUG_PARTIALLY_FIXED: + # The background task no longer blocks the completion of the web request/web transaction. + # However, the BaseHTTPMiddleware causes the task to be cancelled when the web request disconnects, so there are no + # longer function traces or background task transactions. + _test = validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=[route_metric])( + _test + ) + _test = validate_transaction_count(1)(_test) else: + # The BaseHTTPMiddleware causes the background task to execute within the web request + # with the web transaction still active. _test = validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=old_metrics + "_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=[route_metric, bg_task_metric] )(_test) _test = validate_transaction_count(1)(_test) diff --git a/tests/logger_loguru/test_stack_inspection.py b/tests/logger_loguru/test_stack_inspection.py new file mode 100644 index 000000000..fb2738ac2 --- /dev/null +++ b/tests/logger_loguru/test_stack_inspection.py @@ -0,0 +1,56 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from conftest import CaplogHandler + +from newrelic.api.background_task import background_task +from testing_support.fixtures import reset_core_stats_engine +from testing_support.validators.validate_log_event_count import validate_log_event_count +from testing_support.validators.validate_log_events import validate_log_events +from testing_support.fixtures import override_application_settings + + + +@pytest.fixture(scope="function") +def filepath_logger(): + import loguru + _logger = loguru.logger + caplog = CaplogHandler() + handler_id = _logger.add(caplog, level="WARNING", format="{file}:{function} - {message}") + _logger.caplog = caplog + yield _logger + del caplog.records[:] + _logger.remove(handler_id) + + +@override_application_settings({ + "application_logging.local_decorating.enabled": False, +}) +@reset_core_stats_engine() +def test_filepath_inspection(filepath_logger): + # Test for regression in stack inspection that caused log messages. + # See https://github.com/newrelic/newrelic-python-agent/issues/603 + + @validate_log_events([{"message": "A", "level": "ERROR"}]) + @validate_log_event_count(1) + @background_task() + def test(): + filepath_logger.error("A") + assert len(filepath_logger.caplog.records) == 1 + record = filepath_logger.caplog.records[0] + assert record == "test_stack_inspection.py:test - A", record + + test() diff --git a/tests/messagebroker_confluentkafka/conftest.py b/tests/messagebroker_confluentkafka/conftest.py new file mode 100644 index 000000000..a86af3ff9 --- /dev/null +++ b/tests/messagebroker_confluentkafka/conftest.py @@ -0,0 +1,283 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
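One confluent-kafka quirk the fixtures in this conftest encode: the C-backed `Producer` accepts its delivery callback as `callback=`, while `SerializingProducer` spells the same argument `on_delivery=`. A minimal sketch of that dispatch, mirroring the `send_producer_message` fixture below (`producer`, `topic`, `payload`, `on_delivery`, and `is_cimpl` are stand-ins supplied by the caller):

    def produce_with_callback(producer, topic, payload, on_delivery, is_cimpl):
        # cimpl Producer and SerializingProducer name the delivery-callback
        # keyword differently, so the fixtures below branch on client type.
        if is_cimpl:
            producer.produce(topic, value=payload, callback=on_delivery)
        else:
            producer.produce(topic, value=payload, on_delivery=on_delivery)
        producer.flush()  # block until outstanding delivery callbacks fire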
+ +import json +import uuid + +import pytest +from testing_support.db_settings import kafka_settings +from testing_support.fixtures import ( # noqa: F401, pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import transient_function_wrapper + +DB_SETTINGS = kafka_settings()[0] + +BROKER = "%s:%s" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) + +_coverage_source = [ + "newrelic.hooks.messagebroker_confluentkafka", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (messagebroker_confluentkafka)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (messagebroker_confluentkafka)"], +) + + +@pytest.fixture(scope="session", params=["cimpl", "serializer_function", "serializer_object"]) +def client_type(request): + return request.param + + +@pytest.fixture() +def skip_if_not_serializing(client_type): + if client_type == "cimpl": + pytest.skip("Only serializing clients supported.") + + +@pytest.fixture(scope="function") +def producer(topic, client_type, json_serializer): + from confluent_kafka import Producer, SerializingProducer + + if client_type == "cimpl": + producer = Producer({"bootstrap.servers": BROKER}) + elif client_type == "serializer_function": + producer = SerializingProducer( + { + "bootstrap.servers": BROKER, + "value.serializer": lambda v, c: json.dumps(v).encode("utf-8"), + "key.serializer": lambda v, c: json.dumps(v).encode("utf-8") if v is not None else None, + } + ) + elif client_type == "serializer_object": + producer = SerializingProducer( + { + "bootstrap.servers": BROKER, + "value.serializer": json_serializer, + "key.serializer": json_serializer, + } + ) + + yield producer + + if hasattr(producer, "purge"): + producer.purge() + + +@pytest.fixture(scope="function") +def consumer(topic, producer, client_type, json_deserializer): + from confluent_kafka import Consumer, DeserializingConsumer + + if client_type == "cimpl": + consumer = Consumer( + { + "bootstrap.servers": BROKER, + "auto.offset.reset": "earliest", + "heartbeat.interval.ms": 1000, + "group.id": "test", + } + ) + elif client_type == "serializer_function": + consumer = DeserializingConsumer( + { + "bootstrap.servers": BROKER, + "auto.offset.reset": "earliest", + "heartbeat.interval.ms": 1000, + "group.id": "test", + "value.deserializer": lambda v, c: json.loads(v.decode("utf-8")), + "key.deserializer": lambda v, c: json.loads(v.decode("utf-8")) if v is not None else None, + } + ) + elif client_type == "serializer_object": + consumer = DeserializingConsumer( + { + "bootstrap.servers": BROKER, + "auto.offset.reset": "earliest", + "heartbeat.interval.ms": 1000, + "group.id": "test", + "value.deserializer": json_deserializer, + "key.deserializer": json_deserializer, + } + ) + + consumer.subscribe([topic]) + + yield consumer + + consumer.close() + + +@pytest.fixture(scope="session") +def serialize(client_type): + if client_type == "cimpl": + return lambda v: json.dumps(v).encode("utf-8") + else: + return lambda v: v + + 
+@pytest.fixture(scope="session") +def deserialize(client_type): + if client_type == "cimpl": + return lambda v: json.loads(v.decode("utf-8")) + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def json_serializer(): + from confluent_kafka.serialization import Serializer + + class JSONSerializer(Serializer): + def __call__(self, obj, ctx): + return json.dumps(obj).encode("utf-8") if obj is not None else None + + return JSONSerializer() + + +@pytest.fixture(scope="session") +def json_deserializer(): + from confluent_kafka.serialization import Deserializer + + class JSONDeserializer(Deserializer): + def __call__(self, obj, ctx): + return json.loads(obj.decode("utf-8")) if obj is not None else None + + return JSONDeserializer() + + +@pytest.fixture(scope="function") +def topic(): + from confluent_kafka.admin import AdminClient, NewTopic + + topic = "test-topic-%s" % str(uuid.uuid4()) + + admin = AdminClient({"bootstrap.servers": BROKER}) + new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)] + topics = admin.create_topics(new_topics) + for _, f in topics.items(): + f.result() # Block until topic is created. + + yield topic + + admin.delete_topics(new_topics) + + +@pytest.fixture() +def send_producer_message(topic, producer, serialize, client_type): + callback_called = [] + + def producer_callback(err, msg): + callback_called.append(True) + + def _test(): + if client_type == "cimpl": + producer.produce(topic, value=serialize({"foo": 1}), callback=producer_callback) + else: + producer.produce(topic, value=serialize({"foo": 1}), on_delivery=producer_callback) + producer.flush() + assert callback_called + + return _test + + +@pytest.fixture() +def get_consumer_record(topic, send_producer_message, consumer, deserialize): + def _test(): + send_producer_message() + + record_count = 0 + + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + record = consumer.poll(0.5) + if not record: + attempts += 1 + continue + assert not record.error() + + assert deserialize(record.value()) == {"foo": 1} + record_count += 1 + consumer.poll(0.5) # Exit the transaction. + + assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." 
% record_count + + return _test + + +def cache_kafka_producer_headers(): + import confluent_kafka.cimpl + + @transient_function_wrapper(confluent_kafka.cimpl, "Producer.produce.__wrapped__") + # Place transient wrapper underneath instrumentation + def _cache_kafka_producer_headers(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + ret = wrapped(*args, **kwargs) + headers = kwargs.get("headers", []) + headers = dict(headers) + transaction._test_request_headers = headers + return ret + + return _cache_kafka_producer_headers + + +def cache_kafka_consumer_headers(): + import confluent_kafka.cimpl + + @transient_function_wrapper(confluent_kafka.cimpl, "Consumer.poll") + # Place transient wrapper underneath instrumentation + def _cache_kafka_consumer_headers(wrapped, instance, args, kwargs): + record = wrapped(*args, **kwargs) + transaction = current_transaction() + + if transaction is None: + return record + + headers = dict(record.headers()) + transaction._test_request_headers = headers + return record + + return _cache_kafka_consumer_headers + + +@pytest.fixture(autouse=True) +def assert_no_active_transaction(): + # Run before test + assert not current_transaction(active_only=False), "Transaction exists before test run." + + yield # Run test + + # Run after test + assert not current_transaction(active_only=False), "Transaction was not properly exited." diff --git a/tests/messagebroker_confluentkafka/test_consumer.py b/tests/messagebroker_confluentkafka/test_consumer.py new file mode 100644 index 000000000..61f532a78 --- /dev/null +++ b/tests/messagebroker_confluentkafka/test_consumer.py @@ -0,0 +1,184 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
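The consumer tests below all rely on one instrumentation detail already visible in the `get_consumer_record` fixture in conftest.py above: the agent opens a message transaction when `poll()` returns a record and closes it on the next `poll()` call, which is why the fixture ends with an extra `consumer.poll(0.5)` commented "Exit the transaction." A minimal sketch of that lifecycle (`process_record` is a hypothetical placeholder for test assertions):

    def consume_one(consumer, process_record):
        record = consumer.poll(0.5)  # a returned record starts the message transaction
        if record is not None and not record.error():
            process_record(record)   # work here is timed inside that transaction
        consumer.poll(0.5)           # the following poll ends the transaction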
+ +import pytest +from conftest import cache_kafka_consumer_headers +from testing_support.fixtures import ( + reset_core_stats_engine, + validate_attributes, + validate_error_event_attributes_outside_transaction, + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_distributed_trace_accepted import ( + validate_distributed_trace_accepted, +) +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) + +from newrelic.api.background_task import background_task +from newrelic.api.transaction import end_of_transaction +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_custom_metrics(get_consumer_record, topic): + custom_metrics = [ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ] + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + custom_metrics=custom_metrics, + background_task=True, + ) + @validate_transaction_count(1) + def _test(): + get_consumer_record() + + _test() + + +def test_multiple_transactions(get_consumer_record, topic): + @validate_transaction_count(2) + def _test(): + get_consumer_record() + get_consumer_record() + + _test() + + +def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): + transaction_name = ( + "test_consumer:test_custom_metrics_on_existing_transaction.._test" if six.PY3 else "test_consumer:_test" + ) + + @validate_transaction_metrics( + transaction_name, + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ], + background_task=True, + ) + @validate_transaction_count(1) + @background_task() + def _test(): + get_consumer_record() + + _test() + + +def test_custom_metrics_inactive_transaction(get_consumer_record, topic): + transaction_name = ( + "test_consumer:test_custom_metrics_inactive_transaction.._test" if six.PY3 else "test_consumer:_test" + ) + + @validate_transaction_metrics( + transaction_name, + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, None), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, None), + ], + background_task=True, + ) + @validate_transaction_count(1) + @background_task() + def _test(): + end_of_transaction() + get_consumer_record() + + _test() + + +def test_agent_attributes(get_consumer_record): + @validate_attributes("agent", ["kafka.consume.byteCount"]) + def _test(): + get_consumer_record() + + _test() + + +def test_consumer_errors(topic, consumer, producer): + # Close the consumer in order to force poll to raise an exception. + consumer.close() + + expected_error = RuntimeError + + @reset_core_stats_engine() + @validate_error_event_attributes_outside_transaction( + num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(expected_error)}, "agent": {}, "user": {}} + ) + def _test(): + with pytest.raises(expected_error): + producer.produce(topic, value="A") + producer.flush() + while consumer.poll(0.5): + pass + + _test() + + +def test_consumer_handled_errors_not_recorded(get_consumer_record): + # It's important to check that we do not notice the StopIteration error. + @validate_transaction_errors([]) + def _test(): + get_consumer_record() + + _test() + + +def test_distributed_tracing_headers(topic, producer, consumer, serialize): + # Produce the messages inside a transaction, making sure to close it. 
+ @validate_transaction_count(1) + @background_task() + def _produce(): + producer.produce(topic, key="bar", value=serialize({"foo": 1})) + producer.flush() + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + rollup_metrics=[ + ("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/Accept/Success", 1), + ], + background_task=True, + ) + @validate_transaction_count(1) + def _consume(): + @validate_distributed_trace_accepted(transport_type="Kafka") + @cache_kafka_consumer_headers() + def _test(): + # Start the transaction but don't exit it. + # Keep polling until we get the record or the timeout is exceeded. + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + record = consumer.poll(0.5) + if not record: + attempts += 1 + continue + + _test() + + # Exit the transaction. + consumer.poll(0.5) + + _produce() + _consume() diff --git a/tests/messagebroker_confluentkafka/test_producer.py b/tests/messagebroker_confluentkafka/test_producer.py new file mode 100644 index 000000000..71b674e80 --- /dev/null +++ b/tests/messagebroker_confluentkafka/test_producer.py @@ -0,0 +1,117 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading + +import pytest +from conftest import cache_kafka_producer_headers +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_messagebroker_headers import ( + validate_messagebroker_headers, +) + +from newrelic.api.background_task import background_task +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +@pytest.mark.parametrize( + "headers", [[("MY-HEADER", "nonsense")], {"MY-HEADER": "nonsense"}], ids=["list of tuples headers", "dict headers"] +) +@background_task() +def test_produce_arguments(topic, producer, client_type, serialize, headers): + callback_called = threading.Event() + + def producer_callback(err, msg): + callback_called.set() + + if client_type == "cimpl": + producer.produce( + topic, + value=serialize({"foo": 1}), + key=serialize("my-key"), + callback=producer_callback, + partition=1, + timestamp=1, + headers=headers, + ) + else: + producer.produce( + topic, + value=serialize({"foo": 1}), + key=serialize("my-key"), + partition=1, + on_delivery=producer_callback, + timestamp=1, + headers=headers, + ) + producer.flush() + + assert callback_called.wait(5), "Callback never called." 
+ + +def test_trace_metrics(topic, send_producer_message): + scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] + unscoped_metrics = scoped_metrics + txn_name = "test_producer:test_trace_metrics..test" if six.PY3 else "test_producer:test" + + @validate_transaction_metrics( + txn_name, + scoped_metrics=scoped_metrics, + rollup_metrics=unscoped_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +def test_distributed_tracing_headers(topic, send_producer_message): + txn_name = "test_producer:test_distributed_tracing_headers..test" if six.PY3 else "test_producer:test" + + @validate_transaction_metrics( + txn_name, + rollup_metrics=[ + ("Supportability/TraceContext/Create/Success", 1), + ("Supportability/DistributedTrace/CreatePayload/Success", 1), + ], + background_task=True, + ) + @background_task() + @cache_kafka_producer_headers() + @validate_messagebroker_headers + def test(): + send_producer_message() + + test() + + +def test_producer_errors(topic, producer, monkeypatch): + if hasattr(producer, "_value_serializer"): + # Remove serializer to intentionally cause a type error in underlying producer implementation + monkeypatch.setattr(producer, "_value_serializer", None) + + @validate_transaction_errors([callable_name(TypeError)]) + @background_task() + def test(): + with pytest.raises(TypeError): + producer.produce(topic, value=object()) + producer.flush() + + test() diff --git a/tests/messagebroker_confluentkafka/test_serialization.py b/tests/messagebroker_confluentkafka/test_serialization.py new file mode 100644 index 000000000..4d948713d --- /dev/null +++ b/tests/messagebroker_confluentkafka/test_serialization.py @@ -0,0 +1,152 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
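For orientation before the assertions below: produce-side serialization is timed under the `MessageBroker/` metric prefix, while consume-side deserialization lands under `Message/`, both keyed by topic name. A small illustrative helper (not part of the suite) that assembles the metric pairs these tests expect:

    def expected_serde_metrics(topic):
        # Metric names mirror the assertions in the tests below.
        return [
            ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Value" % topic, 1),
            ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Key" % topic, 1),
            ("Message/Kafka/Topic/Named/%s/Deserialization/Value" % topic, 1),
            ("Message/Kafka/Topic/Named/%s/Deserialization/Key" % topic, 1),
        ]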
+ +import pytest +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_serialization_metrics(skip_if_not_serializing, topic, send_producer_message): + txn_name = "test_serialization:test_serialization_metrics..test" if six.PY3 else "test_serialization:test" + + _metrics = [ + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Value" % topic, 1), + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Key" % topic, 1), + ] + + @validate_transaction_metrics( + txn_name, + scoped_metrics=_metrics, + rollup_metrics=_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +def test_deserialization_metrics(skip_if_not_serializing, topic, get_consumer_record): + _metrics = [ + ("Message/Kafka/Topic/Named/%s/Deserialization/Value" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Deserialization/Key" % topic, 1), + ] + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + scoped_metrics=_metrics, + rollup_metrics=_metrics, + background_task=True, + ) + def test(): + get_consumer_record() + + test() + + +@pytest.mark.parametrize( + "key,value,error", + ( + (object(), "A", "KeySerializationError"), + ("A", object(), "ValueSerializationError"), + ), +) +def test_serialization_errors(skip_if_not_serializing, topic, producer, key, value, error): + import confluent_kafka.error + + error_cls = getattr(confluent_kafka.error, error) + + @validate_transaction_errors([callable_name(error_cls)]) + @background_task() + def test(): + with pytest.raises(error_cls): + producer.produce(topic=topic, key=key, value=value) + + test() + + +@pytest.mark.parametrize( + "key,value,error", + ( + ("%", "{}", "KeyDeserializationError"), + ("{}", "%", "ValueDeserializationError"), + ), +) +def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, producer, consumer, key, value, error): + import confluent_kafka.error + + error_cls = getattr(confluent_kafka.error, error) + + # Remove serializers to cause intentional issues + monkeypatch.setattr(producer, "_value_serializer", None) + monkeypatch.setattr(producer, "_key_serializer", None) + + producer.produce(topic=topic, key=key, value=value) + producer.flush() + + @validate_transaction_errors([callable_name(error_cls)]) + @background_task() + def test(): + with pytest.raises(error_cls): + timeout = 10 + attempts = 0 + while attempts < timeout: + if not consumer.poll(0.5): + attempts += 1 + continue + + test() + + +@pytest.fixture +def send_producer_message(topic, producer): + def _test(): + producer.produce(topic, value={"foo": 1}) + producer.flush() + + return _test + + +@pytest.fixture() +def get_consumer_record(topic, send_producer_message, consumer): + def _test(): + send_producer_message() + + record_count = 0 + + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + record = consumer.poll(0.5) + if not record: + attempts += 1 + continue + assert not record.error() + + assert record.value() == {"foo": 1} + record_count += 1 + consumer.poll(0.5) # Exit the transaction. + + assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." 
% record_count + + return _test diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py new file mode 100644 index 000000000..098486f34 --- /dev/null +++ b/tests/messagebroker_kafkapython/conftest.py @@ -0,0 +1,281 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import uuid + +import kafka +import pytest +from testing_support.db_settings import kafka_settings +from testing_support.fixtures import ( # noqa: F401, pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import transient_function_wrapper + +DB_SETTINGS = kafka_settings()[0] + +BOOTSTRAP_SERVER = "%s:%s" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) +BROKER = [BOOTSTRAP_SERVER] + +_coverage_source = [ + "newrelic.hooks.messagebroker_kafkapython", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (messagebroker_kafkapython)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (messagebroker_kafkapython)"], +) + + +@pytest.fixture( + scope="session", params=["no_serializer", "serializer_function", "callable_object", "serializer_object"] +) +def client_type(request): + return request.param + + +@pytest.fixture() +def skip_if_not_serializing(client_type): + if client_type == "no_serializer": + pytest.skip("Only serializing clients supported.") + + +@pytest.fixture(scope="function") +def producer(client_type, json_serializer, json_callable_serializer): + if client_type == "no_serializer": + producer = kafka.KafkaProducer(bootstrap_servers=BROKER) + elif client_type == "serializer_function": + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, + value_serializer=lambda v: json.dumps(v).encode("utf-8") if v else None, + key_serializer=lambda v: json.dumps(v).encode("utf-8") if v else None, + ) + elif client_type == "callable_object": + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, + value_serializer=json_callable_serializer, + key_serializer=json_callable_serializer, + ) + elif client_type == "serializer_object": + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, + value_serializer=json_serializer, + key_serializer=json_serializer, + ) + + yield producer + producer.close() + + +@pytest.fixture(scope="function") +def consumer(topic, producer, client_type, json_deserializer, json_callable_deserializer): + if client_type == "no_serializer": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + auto_offset_reset="earliest", + 
consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + elif client_type == "serializer_function": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + key_deserializer=lambda v: json.loads(v.decode("utf-8")) if v else None, + value_deserializer=lambda v: json.loads(v.decode("utf-8")) if v else None, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + elif client_type == "callable_object": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + key_deserializer=json_callable_deserializer, + value_deserializer=json_callable_deserializer, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + elif client_type == "serializer_object": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + key_deserializer=json_deserializer, + value_deserializer=json_deserializer, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + + yield consumer + consumer.close() + + +@pytest.fixture(scope="session") +def serialize(client_type): + if client_type == "no_serializer": + return lambda v: json.dumps(v).encode("utf-8") + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def deserialize(client_type): + if client_type == "no_serializer": + return lambda v: json.loads(v.decode("utf-8")) + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def json_serializer(): + class JSONSerializer(kafka.serializer.Serializer): + def serialize(self, topic, obj): + return json.dumps(obj).encode("utf-8") if obj is not None else None + + return JSONSerializer() + + +@pytest.fixture(scope="session") +def json_deserializer(): + class JSONDeserializer(kafka.serializer.Deserializer): + def deserialize(self, topic, bytes_): + return json.loads(bytes_.decode("utf-8")) if bytes_ is not None else None + + return JSONDeserializer() + + +@pytest.fixture(scope="session") +def json_callable_serializer(): + class JSONCallableSerializer(object): + def __call__(self, obj): + return json.dumps(obj).encode("utf-8") if obj is not None else None + + return JSONCallableSerializer() + + +@pytest.fixture(scope="session") +def json_callable_deserializer(): + class JSONCallableDeserializer(object): + def __call__(self, obj): + return json.loads(obj.decode("utf-8")) if obj is not None else None + + return JSONCallableDeserializer() + + +@pytest.fixture(scope="function") +def topic(): + from kafka.admin.client import KafkaAdminClient + from kafka.admin.new_topic import NewTopic + + topic = "test-topic-%s" % str(uuid.uuid4()) + + admin = KafkaAdminClient(bootstrap_servers=BROKER) + new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)] + admin.create_topics(new_topics) + + yield topic + + admin.delete_topics([topic]) + + +@pytest.fixture() +def send_producer_message(topic, producer, serialize): + def _test(): + producer.send(topic, key=serialize("bar"), value=serialize({"foo": 1})) + producer.flush() + + return _test + + +@pytest.fixture() +def get_consumer_record(topic, send_producer_message, consumer, deserialize): + def _test(): + send_producer_message() + + record_count = 0 + + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + for record in consumer: + assert deserialize(record.value) == {"foo": 1} + record_count += 1 + attempts += 1 + + assert record_count == 1, "Incorrect count of records consumed: %d. 
Expected 1." % record_count + + return _test + + +@transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__") +# Place transient wrapper underneath instrumentation +def cache_kafka_producer_headers(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + ret = wrapped(*args, **kwargs) + headers = kwargs.get("headers", []) + headers = dict(headers) + transaction._test_request_headers = headers + return ret + + +@transient_function_wrapper(kafka.consumer.group, "KafkaConsumer.__next__") +# Place transient wrapper underneath instrumentation +def cache_kafka_consumer_headers(wrapped, instance, args, kwargs): + record = wrapped(*args, **kwargs) + transaction = current_transaction() + + if transaction is None: + return record + + headers = record.headers + headers = dict(headers) + transaction._test_request_headers = headers + return record + + +@pytest.fixture(autouse=True) +def assert_no_active_transaction(): + # Run before test + assert not current_transaction(active_only=False), "Transaction exists before test run." + + yield # Run test + + # Run after test + assert not current_transaction(active_only=False), "Transaction was not properly exited." diff --git a/tests/messagebroker_kafkapython/test_consumer.py b/tests/messagebroker_kafkapython/test_consumer.py new file mode 100644 index 000000000..f53b2acb3 --- /dev/null +++ b/tests/messagebroker_kafkapython/test_consumer.py @@ -0,0 +1,185 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from conftest import cache_kafka_consumer_headers +from testing_support.fixtures import ( + reset_core_stats_engine, + validate_attributes, + validate_error_event_attributes_outside_transaction, + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_distributed_trace_accepted import ( + validate_distributed_trace_accepted, +) +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) + +from newrelic.api.background_task import background_task +from newrelic.api.transaction import end_of_transaction +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_custom_metrics(get_consumer_record, topic): + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ], + background_task=True, + ) + def _test(): + get_consumer_record() + + _test() + + +def test_multiple_transactions(get_consumer_record, topic): + @validate_transaction_count(2) + def _test(): + get_consumer_record() + get_consumer_record() + + _test() + + +def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): + transaction_name = ( + "test_consumer:test_custom_metrics_on_existing_transaction.._test" if six.PY3 else "test_consumer:_test" + ) + + @validate_transaction_metrics( + transaction_name, + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ], + background_task=True, + ) + @validate_transaction_count(1) + @background_task() + def _test(): + get_consumer_record() + + _test() + + +def test_custom_metrics_inactive_transaction(get_consumer_record, topic): + transaction_name = ( + "test_consumer:test_custom_metrics_inactive_transaction.._test" if six.PY3 else "test_consumer:_test" + ) + + @validate_transaction_metrics( + transaction_name, + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, None), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, None), + ], + background_task=True, + ) + @validate_transaction_count(1) + @background_task() + def _test(): + end_of_transaction() + get_consumer_record() + + _test() + + +def test_agent_attributes(get_consumer_record): + @validate_attributes("agent", ["kafka.consume.client_id", "kafka.consume.byteCount"]) + def _test(): + get_consumer_record() + + _test() + + +def test_consumer_errors(get_consumer_record, consumer_next_raises): + exc_class = RuntimeError + + @reset_core_stats_engine() + @validate_error_event_attributes_outside_transaction( + num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(exc_class)}, "agent": {}, "user": {}} + ) + def _test(): + with pytest.raises(exc_class): + get_consumer_record() + + _test() + + +def test_consumer_handled_errors_not_recorded(get_consumer_record): + # It's important to check that we do not notice the StopIteration error. + @validate_transaction_errors([]) + def _test(): + get_consumer_record() + + _test() + + +def test_distributed_tracing_headers(topic, producer, consumer, serialize): + # Produce the messages inside a transaction, making sure to close it. 
+ @background_task() + def _produce(): + producer.send(topic, key=serialize("bar"), value=serialize({"foo": 1})) + producer.flush() + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + rollup_metrics=[ + ("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/Accept/Success", 1), + ], + background_task=True, + ) + @validate_transaction_count(1) + def _consume(): + consumer_iter = iter(consumer) + + @validate_distributed_trace_accepted(transport_type="Kafka") + @cache_kafka_consumer_headers + def _test(): + # Start the transaction but don't exit it. + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + try: + record = next(consumer_iter) + except StopIteration: + attempts += 1 + + _test() + + # Exit the transaction. + with pytest.raises(StopIteration): + next(consumer_iter) + + _produce() + _consume() + + +@pytest.fixture() +def consumer_next_raises(consumer): + def _poll(*args, **kwargs): + raise RuntimeError() + + consumer.poll = _poll + return consumer diff --git a/tests/messagebroker_kafkapython/test_heartbeat.py b/tests/messagebroker_kafkapython/test_heartbeat.py new file mode 100644 index 000000000..32ac9bb7f --- /dev/null +++ b/tests/messagebroker_kafkapython/test_heartbeat.py @@ -0,0 +1,57 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +import kafka +from testing_support.validators.validate_custom_metrics_outside_transaction import ( + validate_custom_metrics_outside_transaction, +) + + +@validate_custom_metrics_outside_transaction( + [ + ("MessageBroker/Kafka/Heartbeat/Poll", "present"), + ("MessageBroker/Kafka/Heartbeat/Sent", "present"), + ("MessageBroker/Kafka/Heartbeat/Receive", "present"), + ("MessageBroker/Kafka/Heartbeat/Fail", None), + ("MessageBroker/Kafka/Heartbeat/SessionTimeout", None), + ("MessageBroker/Kafka/Heartbeat/PollTimeout", None), + ] +) +def test_successful_heartbeat_metrics_recorded(topic, get_consumer_record): + get_consumer_record() + time.sleep(1.5) + + +@validate_custom_metrics_outside_transaction( + [ + ("MessageBroker/Kafka/Heartbeat/Poll", "present"), + ("MessageBroker/Kafka/Heartbeat/Sent", "present"), + ("MessageBroker/Kafka/Heartbeat/Fail", "present"), + ("MessageBroker/Kafka/Heartbeat/Receive", "present"), + ("MessageBroker/Kafka/Heartbeat/SessionTimeout", "present"), + ("MessageBroker/Kafka/Heartbeat/PollTimeout", "present"), + ] +) +def test_fail_timeout_heartbeat_metrics_recorded(): + heartbeat = kafka.coordinator.heartbeat.Heartbeat(session_timeout_ms=0, max_poll_interval_ms=0) + + heartbeat.poll() + heartbeat.sent_heartbeat() + heartbeat.received_heartbeat() + heartbeat.fail_heartbeat() + + assert heartbeat.session_timeout_expired(), "Failed to force heartbeat to timeout." + assert heartbeat.poll_timeout_expired(), "Failed to force heartbeat to timeout." 
diff --git a/tests/messagebroker_kafkapython/test_producer.py b/tests/messagebroker_kafkapython/test_producer.py new file mode 100644 index 000000000..927956482 --- /dev/null +++ b/tests/messagebroker_kafkapython/test_producer.py @@ -0,0 +1,79 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from conftest import cache_kafka_producer_headers +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_messagebroker_headers import ( + validate_messagebroker_headers, +) + +from newrelic.api.background_task import background_task +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_trace_metrics(topic, send_producer_message): + scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] + unscoped_metrics = scoped_metrics + txn_name = "test_producer:test_trace_metrics.<locals>.test" if six.PY3 else "test_producer:test" + + @validate_transaction_metrics( + txn_name, + scoped_metrics=scoped_metrics, + rollup_metrics=unscoped_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +def test_distributed_tracing_headers(topic, send_producer_message): + txn_name = "test_producer:test_distributed_tracing_headers.<locals>.test" if six.PY3 else "test_producer:test" + + @validate_transaction_metrics( + txn_name, + rollup_metrics=[ + ("Supportability/TraceContext/Create/Success", 1), + ("Supportability/DistributedTrace/CreatePayload/Success", 1), + ], + background_task=True, + ) + @background_task() + @cache_kafka_producer_headers + @validate_messagebroker_headers + def test(): + send_producer_message() + + test() + + +def test_producer_errors(topic, producer, monkeypatch): + monkeypatch.setitem(producer.config, "value_serializer", None) + monkeypatch.setitem(producer.config, "key_serializer", None) + + @validate_transaction_errors([callable_name(AssertionError)]) + @background_task() + def test(): + with pytest.raises(AssertionError): + producer.send(topic, value=object()) + producer.flush() + + test() diff --git a/tests/messagebroker_kafkapython/test_serialization.py b/tests/messagebroker_kafkapython/test_serialization.py new file mode 100644 index 000000000..b83b4e85c --- /dev/null +++ b/tests/messagebroker_kafkapython/test_serialization.py @@ -0,0 +1,101 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
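+# Covers the per-topic Serialization/{Key,Value} metrics and the error events + +# captured when kafka-python (de)serializers raise; the skip_if_not_serializing +# fixture (from conftest) presumably skips these when the client under test has +# no serializers configured.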
+ +import json + +import pytest +from testing_support.fixtures import ( + reset_core_stats_engine, + validate_error_event_attributes_outside_transaction, + validate_transaction_errors, + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_serialization_metrics(skip_if_not_serializing, topic, send_producer_message): + txn_name = "test_serialization:test_serialization_metrics.<locals>.test" if six.PY3 else "test_serialization:test" + + _metrics = [ + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Value" % topic, 1), + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Key" % topic, 1), + ] + + @validate_transaction_metrics( + txn_name, + scoped_metrics=_metrics, + rollup_metrics=_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +@pytest.mark.parametrize( + "key,value", + ( + (object(), "A"), + ("A", object()), + ), +) +def test_serialization_errors(skip_if_not_serializing, topic, producer, key, value): + error_cls = TypeError + + @validate_transaction_errors([callable_name(error_cls)]) + @background_task() + def test(): + with pytest.raises(error_cls): + producer.send(topic=topic, key=key, value=value) + + test() + + +@pytest.mark.parametrize( + "key,value", + ( + (b"%", b"{}"), + (b"{}", b"%"), + ), +) +def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, producer, consumer, key, value): + error_cls = json.decoder.JSONDecodeError if six.PY3 else ValueError + + # Remove serializers to cause intentional issues + monkeypatch.setitem(producer.config, "value_serializer", None) + monkeypatch.setitem(producer.config, "key_serializer", None) + + producer.send(topic=topic, key=key, value=value) + producer.flush() + + @reset_core_stats_engine() + @validate_error_event_attributes_outside_transaction( + num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(error_cls)}, "agent": {}, "user": {}} + ) + def test(): + with pytest.raises(error_cls): + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + for record in consumer: + pass + attempts += 1 + + test() diff --git a/tests/messagebroker_pika/conftest.py b/tests/messagebroker_pika/conftest.py index a64f9e8cd..9849ee014 100644 --- a/tests/messagebroker_pika/conftest.py +++ b/tests/messagebroker_pika/conftest.py @@ -12,53 +12,55 @@ # See the License for the specific language governing permissions and # limitations under the License.
-import pika -import pytest import uuid +import pika +import pytest from testing_support.db_settings import rabbitmq_settings -from testing_support.fixtures import (code_coverage_fixture, # NOQA - collector_agent_registration_fixture, collector_available_fixture) - +from testing_support.fixtures import ( # noqa: F401 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) -QUEUE = 'test_pika-%s' % uuid.uuid4() -QUEUE_2 = 'test_pika-%s' % uuid.uuid4() +QUEUE = "test_pika-%s" % uuid.uuid4() +QUEUE_2 = "test_pika-%s" % uuid.uuid4() -EXCHANGE = 'exchange-%s' % uuid.uuid4() -EXCHANGE_2 = 'exchange-%s' % uuid.uuid4() +EXCHANGE = "exchange-%s" % uuid.uuid4() +EXCHANGE_2 = "exchange-%s" % uuid.uuid4() -CORRELATION_ID = 'test-correlation-id' -REPLY_TO = 'test-reply-to' -HEADERS = {'TestHeader': 'my test header value'} -BODY = b'test_body' +CORRELATION_ID = "test-correlation-id" +REPLY_TO = "test-reply-to" +HEADERS = {"TestHeader": "my test header value"} +BODY = b"test_body" DB_SETTINGS = rabbitmq_settings()[0] _coverage_source = [ - 'newrelic.hooks.messagebroker_pika', + "newrelic.hooks.messagebroker_pika", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (messagebroker_pika)', - default_settings=_default_settings, - linked_applications=['Python Agent Test (messagebroker)']) + app_name="Python Agent Test (messagebroker_pika)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (messagebroker_pika)"], +) @pytest.fixture() def producer(): # put something into the queue so it can be consumed - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE, durable=False) @@ -69,10 +71,7 @@ def producer(): exchange=EXCHANGE, routing_key=QUEUE, body=BODY, - properties=pika.spec.BasicProperties( - correlation_id=CORRELATION_ID, - reply_to=REPLY_TO, - headers=HEADERS), + properties=pika.spec.BasicProperties(correlation_id=CORRELATION_ID, reply_to=REPLY_TO, headers=HEADERS), ) yield channel.queue_delete(queue=QUEUE) @@ -82,8 +81,7 @@ def producer(): @pytest.fixture() def producer_2(): # put something into the queue so it can be consumed - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE_2, durable=False) @@ -94,10 +92,7 @@ def producer_2(): exchange=EXCHANGE_2, routing_key=QUEUE_2, body=BODY, - properties=pika.spec.BasicProperties( - correlation_id=CORRELATION_ID, - reply_to=REPLY_TO, - headers=HEADERS), + properties=pika.spec.BasicProperties(correlation_id=CORRELATION_ID, reply_to=REPLY_TO, headers=HEADERS), ) yield 
channel.queue_delete(queue=QUEUE_2) @@ -107,8 +102,7 @@ def producer_2(): @pytest.fixture() def produce_five(): # put something into the queue so it can be consumed - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE, durable=False) @@ -120,10 +114,7 @@ def produce_five(): exchange=EXCHANGE, routing_key=QUEUE, body=BODY, - properties=pika.spec.BasicProperties( - correlation_id=CORRELATION_ID, - reply_to=REPLY_TO, - headers=HEADERS), + properties=pika.spec.BasicProperties(correlation_id=CORRELATION_ID, reply_to=REPLY_TO, headers=HEADERS), ) yield diff --git a/tests/testing_support/db_settings.py b/tests/testing_support/db_settings.py index 8f4c7b49a..c7c35935f 100644 --- a/tests/testing_support/db_settings.py +++ b/tests/testing_support/db_settings.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pwd import os +import pwd USER = pwd.getpwuid(os.getuid()).pw_name @@ -168,11 +168,7 @@ def mongodb_settings(): base_port = 27017 settings = [ - { - "host": "127.0.0.1", - "port": base_port + instance_num, - "collection": "mongodb_collection_" + str(os.getpid()) - } + {"host": "127.0.0.1", "port": base_port + instance_num, "collection": "mongodb_collection_" + str(os.getpid())} for instance_num in range(instances) ] return settings @@ -258,3 +254,31 @@ def rabbitmq_settings(): for instance_num in range(instances) ] return settings + + +def kafka_settings(): + """Return a list of dict of settings for connecting to kafka. + + Will return the correct settings, depending on which of the environments it + is running in. It attempts to set variables in the following order, where + later environments override earlier ones. + + 1. Local + 2. 
Github Actions + """ + + if "GITHUB_ACTIONS" in os.environ: + instances = 2 + base_port = 8080 + else: + instances = 1 + base_port = 9092 + + settings = [ + { + "host": "localhost", + "port": base_port + instance_num, + } + for instance_num in range(instances) + ] + return settings diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 71bfea670..2df593abc 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -603,8 +603,12 @@ def no_op(wrapped, instance, args, kwargs): @function_wrapper def _validate_wrapper(wrapped, instance, args, kwargs): # Apply no-op wrappers to prevent new internal trace contexts from being started, preventing capture - wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__enter__")(no_op)(wrapped) - wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__exit__")(no_op)(wrapped) + wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__enter__")(no_op)( + wrapped + ) + wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__exit__")(no_op)( + wrapped + ) captured_metrics = CustomMetrics() with InternalTraceContext(captured_metrics): @@ -1736,16 +1740,27 @@ def validate_error_event_attributes_outside_transaction( required_params = required_params or {} forgone_params = forgone_params or {} + event_data = [] + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.notice_error") def _validate_error_event_attributes_outside_transaction(wrapped, instance, args, kwargs): - try: result = wrapped(*args, **kwargs) except: raise else: - event_data = list(instance.error_events) + for event in instance.error_events: + event_data.append(event) + return result + + @function_wrapper + def wrapper(wrapped, instance, args, kwargs): + try: + result = _validate_error_event_attributes_outside_transaction(wrapped)(*args, **kwargs) + except: + raise + else: if num_errors is not None: exc_message = ( "Expected: %d, Got: %d. Verify StatsEngine is being reset before using this validator." @@ -1758,7 +1773,7 @@ def _validate_error_event_attributes_outside_transaction(wrapped, instance, args return result - return _validate_error_event_attributes_outside_transaction + return wrapper def validate_request_params_omitted(): @@ -2362,14 +2377,14 @@ def cat_enabled(wrapped, instance, args, kwargs): def override_application_settings(overrides): @function_wrapper def _override_application_settings(wrapped, instance, args, kwargs): - try: - # The settings object has references from a number of - # different places. We have to create a copy, overlay - # the temporary settings and then when done clear the - # top level settings object and rebuild it when done. + # The settings object has references from a number of + # different places. We have to create a copy, overlay + # the temporary settings and then when done clear the + # top level settings object and rebuild it when done. 
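+ # Taking the backup before entering the try block guarantees the + # finally clause never runs without a fully-built copy to restore.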
+ original_settings = application_settings() + backup = copy.deepcopy(original_settings.__dict__) - original_settings = application_settings() - backup = copy.deepcopy(original_settings.__dict__) + try: for name, value in overrides.items(): apply_config_setting(original_settings, name, value) @@ -2390,16 +2405,15 @@ def _override_application_settings(wrapped, instance, args, kwargs): def override_generic_settings(settings_object, overrides): @function_wrapper def _override_generic_settings(wrapped, instance, args, kwargs): - try: - # In some cases, a settings object may have references - # from a number of different places. We have to create - # a copy, overlay the temporary settings and then when - # done, clear the top level settings object and rebuild - # it when done. - - original = settings_object + # In some cases, a settings object may have references + # from a number of different places. We have to create + # a copy, overlay the temporary settings and then when + # done, clear the top level settings object and rebuild + # it when done. + original = settings_object + backup = copy.deepcopy(original.__dict__) - backup = copy.deepcopy(original.__dict__) + try: for name, value in overrides.items(): apply_config_setting(original, name, value) return wrapped(*args, **kwargs) @@ -2413,19 +2427,20 @@ def _override_generic_settings(wrapped, instance, args, kwargs): def override_ignore_status_codes(status_codes): @function_wrapper def _override_ignore_status_codes(wrapped, instance, args, kwargs): - try: - # Updates can be made to ignored status codes in server - # side configs. Changes will be applied to application - # settings so we first check there and if they don't - # exist, we default to global settings + # Updates can be made to ignored status codes in server + # side configs. 
Changes will be applied to application + # settings so we first check there and if they don't + # exist, we default to global settings - application = application_instance() - settings = application and application.settings + application = application_instance() + settings = application and application.settings + + if not settings: + settings = global_settings() - if not settings: - settings = global_settings() + original = settings.error_collector.ignore_status_codes - original = settings.error_collector.ignore_status_codes + try: settings.error_collector.ignore_status_codes = status_codes return wrapped(*args, **kwargs) finally: @@ -2434,25 +2449,28 @@ def _override_ignore_status_codes(wrapped, instance, args, kwargs): return _override_ignore_status_codes -def code_coverage_fixture(source=['newrelic']): - @pytest.fixture(scope='session') +def code_coverage_fixture(source=None): + if source is None: + source = ["newrelic"] + + @pytest.fixture(scope="session") def _code_coverage_fixture(request): if not source: return - if os.environ.get('GITHUB_ACTIONS') is not None: + if os.environ.get("GITHUB_ACTIONS") is not None: return from coverage import coverage - env_directory = os.environ.get('TOX_ENVDIR', None) + env_directory = os.environ.get("TOX_ENVDIR", None) if env_directory is not None: - coverage_directory = os.path.join(env_directory, 'htmlcov') - xml_report = os.path.join(env_directory, 'coverage.xml') + coverage_directory = os.path.join(env_directory, "htmlcov") + xml_report = os.path.join(env_directory, "coverage.xml") else: - coverage_directory = 'htmlcov' - xml_report = 'coverage.xml' + coverage_directory = "htmlcov" + xml_report = "coverage.xml" def finalize(): cov.stop() @@ -2469,18 +2487,19 @@ def finalize(): def reset_core_stats_engine(): """Reset the StatsEngine and custom StatsEngine of the core application.""" + @function_wrapper def _reset_core_stats_engine(wrapped, instance, args, kwargs): api_application = application_instance() api_name = api_application.name core_application = api_application._agent.application(api_name) - + stats = core_application._stats_engine stats.reset_stats(stats.settings) - + custom_stats = core_application._stats_custom_engine custom_stats.reset_stats(custom_stats.settings) - + return wrapped(*args, **kwargs) return _reset_core_stats_engine @@ -2625,8 +2644,8 @@ def _validate_analytics_sample_data(wrapped, instance, args, kwargs): _new_wrapped = _capture_samples(wrapped) result = _new_wrapped(*args, **kwargs) - - _samples = [s for s in samples if s[0]["type"] == "Transaction"] + # Check type of s[0] because it returns an integer if s is a LogEventNode + _samples = [s for s in samples if not isinstance(s[0], int) and s[0]["type"] == "Transaction"] assert _samples, "No Transaction events captured." for sample in _samples: assert isinstance(sample, list) diff --git a/tests/testing_support/sample_asgi_applications.py b/tests/testing_support/sample_asgi_applications.py index e43ba36d4..53bf40d33 100644 --- a/tests/testing_support/sample_asgi_applications.py +++ b/tests/testing_support/sample_asgi_applications.py @@ -12,9 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from newrelic.api.time_trace import notice_error -from newrelic.api.transaction import add_custom_parameter, current_transaction from newrelic.api.asgi_application import ASGIApplicationWrapper +from newrelic.api.time_trace import notice_error +from newrelic.api.transaction import ( + add_custom_parameter, + current_transaction, + ignore_transaction, +) class simple_app_v2_raw: @@ -22,17 +26,21 @@ def __init__(self, scope): self.scope = scope async def __call__(self, receive, send): + if self.scope["type"] == "lifespan": + return await handle_lifespan(self.scope, receive, send) + if self.scope["type"] != "http": raise ValueError("unsupported") if self.scope["path"] == "/exc": raise ValueError("whoopsies") + elif self.scope["path"] == "/ignored": + ignore_transaction() + await send({"type": "http.response.start", "status": 200}) await send({"type": "http.response.body"}) - txn = current_transaction() - - assert txn is None + assert current_transaction() is None class simple_app_v2_init_exc(simple_app_v2_raw): @@ -41,19 +49,21 @@ def __init__(self, scope): async def simple_app_v3_raw(scope, receive, send): + if scope["type"] == "lifespan": + return await handle_lifespan(scope, receive, send) + if scope["type"] != "http": raise ValueError("unsupported") if scope["path"] == "/exc": raise ValueError("whoopsies") + elif scope["path"] == "/ignored": + ignore_transaction() await send({"type": "http.response.start", "status": 200}) await send({"type": "http.response.body"}) - txn = current_transaction() - - assert txn is None - + assert current_transaction() is None class AppWithDescriptor: @@ -104,7 +114,20 @@ async def normal_asgi_application(scope, receive, send): except ValueError: notice_error(attributes={"ohnoes": "param-value"}) - await send( - {"type": "http.response.start", "status": 200, "headers": response_headers} - ) + await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) + + +async def handle_lifespan(scope, receive, send): + """Handle lifespan protocol with no-ops to allow more compatibility.""" + while True: + txn = current_transaction() + if txn: + txn.ignore_transaction = True + + message = await receive() + if message["type"] == "lifespan.startup": + await send({"type": "lifespan.startup.complete"}) + elif message["type"] == "lifespan.shutdown": + await send({"type": "lifespan.shutdown.complete"}) + return diff --git a/tests/testing_support/validators/validate_distributed_trace_accepted.py b/tests/testing_support/validators/validate_distributed_trace_accepted.py new file mode 100644 index 000000000..0898f5f72 --- /dev/null +++ b/tests/testing_support/validators/validate_distributed_trace_accepted.py @@ -0,0 +1,39 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
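+# Decorator asserting that the wrapped call ran inside a transaction that +# accepted an inbound distributed trace of the given transport type.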
+ + +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import function_wrapper + + +def validate_distributed_trace_accepted(header="newrelic", transport_type="HTTP"): + @function_wrapper + def _validate_distributed_trace_accepted(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + + txn = current_transaction() + + assert txn + assert txn._distributed_trace_state + assert txn.parent_type == "App" + assert txn._trace_id.startswith(txn.parent_tx) + assert txn.parent_span is not None + assert txn.parent_account == txn.settings.account_id + assert txn.parent_transport_type == transport_type + assert txn._priority is not None + assert txn._sampled is not None + + return result + + return _validate_distributed_trace_accepted diff --git a/tox.ini b/tox.ini index 38bdd4df7..c50a1b75b 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ ; framework_aiohttp-aiohttp01: aiohttp<2 ; framework_aiohttp-aiohttp0202: aiohttp<2.3 ; 3. Python version required. Uses the standard tox definitions. (https://tox.readthedocs.io/en/latest/config.html#tox-environments) -; Examples: py27,py36,py37,py38,py39,pypy,pypy36 +; Examples: py27,py37,py38,py39,pypy,pypy37 ; 4. Library and version (Optional). Used when testing multiple versions of the library, and may be omitted when only testing a single version. ; Versions should be specified with 2 digits per version number, so <3 becomes 02 and <3.5 becomes 0304. latest and master are also acceptable versions. ; Examples: uvicorn03, CherryPy0302, uvicornlatest @@ -28,7 +28,7 @@ ; 5. With or without New Relic C extensions (Optional). Used for testing agent features. ; Examples: with_extensions, without_extensions ; envlist = -; python-agent_features-pypy36-without_extensions, +; python-agent_features-pypy37-without_extensions, ; python-agent_streaming-py37-{with,without}_extensions, ; ; Full Format: @@ -42,112 +42,122 @@ [tox] setupdir = {toxinidir} envlist = - python-adapter_cheroot-{py27,py36,py37,py38,py39,py310}, - python-adapter_gevent-{py27,py36,py37,py38,py310}, - python-adapter_gunicorn-{py36}-aiohttp1-gunicorn{19,latest}, - python-adapter_gunicorn-{py36,py37,py38,py39,py310}-aiohttp3-gunicornlatest, - python-adapter_uvicorn-{py36,py37}-uvicorn03, - ; Temporarily testing py36 on the uvicorn version preceeding v0.15 - python-adapter_uvicorn-{py36}-uvicorn014 + python-adapter_cheroot-{py27,py37,py38,py39,py310}, + python-adapter_daphne-{py37,py38,py39,py310}-daphnelatest, + python-adapter_daphne-py38-daphne{0204,0205}, + python-adapter_gevent-{py27,py37,py38,py310}, + python-adapter_gunicorn-{py37,py38,py39,py310}-aiohttp3-gunicornlatest, + python-adapter_hypercorn-{py37,py38,py39,py310}-hypercornlatest, + python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, + python-adapter_uvicorn-py37-uvicorn03, python-adapter_uvicorn-{py37,py38,py39,py310}-uvicornlatest, - python-agent_features-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, - python-agent_features-{pypy,pypy36}-without_extensions, + python-agent_features-{py27,py37,py38,py39,py310}-{with,without}_extensions, + python-agent_features-{pypy,pypy37}-without_extensions, python-agent_streaming-py27-grpc0125-{with,without}_extensions, - python-agent_streaming-{py36,py37,py38,py39,py310}-{with,without}_extensions, - python-agent_unittests-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, - python-agent_unittests-{pypy,pypy36}-without_extensions, - python-application_celery-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + 
python-agent_streaming-{py37,py38,py39,py310}-protobuf04-{with,without}_extensions, + python-agent_streaming-py39-protobuf{03,0319}-{with,without}_extensions, + python-agent_unittests-{py27,py37,py38,py39,py310}-{with,without}_extensions, + python-agent_unittests-{pypy,pypy37}-without_extensions, + python-application_celery-{py27,py37,py38,py39,py310,pypy,pypy37}, gearman-application_gearman-{py27,pypy}, python-component_djangorestframework-py27-djangorestframework0300, - python-component_djangorestframework-{py36,py37,py38,py39,py310}-djangorestframeworklatest, - python-component_flask_rest-{py27,py36,py37,py38,py39,pypy,pypy36}, - python-component_graphqlserver-{py36,py37,py38,py39,py310}, + python-component_djangorestframework-{py37,py38,py39,py310}-djangorestframeworklatest, + python-component_flask_rest-{py27,py37,py38,py39,pypy,pypy37}, + python-component_graphqlserver-{py37,py38,py39,py310}, python-component_tastypie-{py27,pypy}-tastypie0143, - python-component_tastypie-{py36,py37,py38,py39,pypy36}-tastypie{0143,latest}, - python-coroutines_asyncio-{py36,py37,py38,py39,py310,pypy36}, - python-cross_agent-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, + python-component_tastypie-{py37,py38,py39,pypy37}-tastypie{0143,latest}, + python-coroutines_asyncio-{py37,py38,py39,py310,pypy37}, + python-cross_agent-{py27,py37,py38,py39,py310}-{with,without}_extensions, python-cross_agent-pypy-without_extensions, - postgres-datastore_asyncpg-{py36,py37,py38,py39,py310}, - memcached-datastore_bmemcached-{pypy,py27,py36,py37,py38,py39,py310}-memcached030, - elasticsearchserver01-datastore_pyelasticsearch-{py27,py36,pypy}, + postgres-datastore_asyncpg-{py37,py38,py39,py310}, + memcached-datastore_bmemcached-{pypy,py27,py37,py38,py39,py310}-memcached030, + elasticsearchserver01-datastore_pyelasticsearch-{py27,pypy}, elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05}, - elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-elasticsearch{07}, - memcached-datastore_memcache-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-memcached01, + elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,pypy,pypy37}-elasticsearch{07}, + memcached-datastore_memcache-{py27,py37,py38,py39,py310,pypy,pypy37}-memcached01, mysql-datastore_mysql-mysql080023-py27, - mysql-datastore_mysql-mysqllatest-{py36,py37,py38,py39,py310}, - postgres-datastore_postgresql-{py36,py37,py38,py39}, - postgres-datastore_psycopg2-{py27,py36,py37,py38,py39,py310}-psycopg20208, - postgres-datastore_psycopg2cffi-{py27,py36,pypy}-psycopg2cffi{0207,0208}, + mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310}, + postgres-datastore_postgresql-{py37,py38,py39}, + postgres-datastore_psycopg2-{py27,py37,py38,py39,py310}-psycopg20208, + postgres-datastore_psycopg2cffi-{py27,pypy}-psycopg2cffi{0207,0208}, postgres-datastore_psycopg2cffi-{py37,py38,py39,py310}-psycopg2cffi0208, - memcached-datastore_pylibmc-{py27,py36,py37}, - memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo{03}, - mysql-datastore_pymysql-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - redis-datastore_redis-{py27,py36,py37,py38,pypy,pypy36}-redis03, - redis-datastore_redis-{py36,py37,py38,py39,py310,pypy36}-redis{0400,latest}, - redis-datastore_aioredis-{py36,py37,py38,py39,py310,pypy36}-aioredislatest, - 
redis-datastore_aioredis-py39-aioredis01, - redis-datastore_aredis-{py36,py37,py38,py39,pypy36}-aredislatest, + memcached-datastore_pylibmc-{py27,py37}, + memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,pypy,pypy37}, + mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,pypy}-pymongo{03}, + mongodb-datastore_pymongo-{py37,py38,py39,py310,pypy,pypy37}-pymongo04, + mysql-datastore_pymysql-{py27,py37,py38,py39,py310,pypy,pypy37}, + solr-datastore_pysolr-{py27,py37,py38,py39,py310,pypy,pypy37}, + redis-datastore_redis-{py27,py37,py38,pypy,pypy37}-redis03, + redis-datastore_redis-{py37,py38,py39,py310,pypy37}-redis{0400,latest}, + redis-datastore_aioredis-{py37,py38,py39,py310,pypy37}-aioredislatest, + redis-datastore_aioredis-{py37,py310}-aioredis01, + redis-datastore_aredis-{py37,py38,py39,pypy37}-aredislatest, solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, - python-datastore_sqlite-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + python-datastore_sqlite-{py27,py37,py38,py39,py310,pypy,pypy37}, memcached-datastore_umemcache-{py27,pypy}, - python-external_boto3-{py27,py36,py37,py38,py39,py310}-boto01, - python-external_botocore-{py27,py36,py37,py38,py39,py310}, + python-external_boto3-{py27,py37,py38,py39,py310}-boto01, + python-external_botocore-{py27,py37,py38,py39,py310}, python-external_feedparser-py27-feedparser{05,06}, - python-external_http-{py27,py36,py37,py38,py39,py310,pypy}, - python-external_httplib-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - python-external_httplib2-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - python-external_httpx-{py36,py37,py38,py39,py310}, - python-external_requests-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + python-external_http-{py27,py37,py38,py39,py310,pypy}, + python-external_httplib-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-external_httplib2-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-external_httpx-{py37,py38,py39,py310}, + python-external_requests-{py27,py37,py38,py39,py310,pypy,pypy37}, python-external_urllib3-{py27,py37,pypy}-urllib3{0109}, - python-external_urllib3-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-urllib3latest, - python-framework_aiohttp-{py36,py37,py38,py39,py310,pypy36}-aiohttp03, - python-framework_ariadne-{py36,py37,py38,py39,py310}-ariadnelatest, + python-external_urllib3-{py27,py37,py38,py39,py310,pypy,pypy37}-urllib3latest, + python-framework_aiohttp-{py37,py38,py39,py310,pypy37}-aiohttp03, + python-framework_ariadne-{py37,py38,py39,py310}-ariadnelatest, python-framework_ariadne-py37-ariadne{0011,0012,0013}, python-framework_bottle-py27-bottle{0008,0009,0010}, - python-framework_bottle-{py27,py36,py37,py38,py39,pypy36}-bottle{0011,0012}, + python-framework_bottle-{py27,py37,py38,py39,pypy37}-bottle{0011,0012}, python-framework_bottle-py310-bottle0012, python-framework_bottle-pypy-bottle{0008,0009,0010,0011,0012}, - python-framework_cherrypy-{py36,py37,py38,py39,py310,pypy36}-CherryPy18, - python-framework_cherrypy-{py36,py37}-CherryPy0302, - python-framework_cherrypy-pypy36-CherryPy0303, + python-framework_cherrypy-{py37,py38,py39,py310,pypy37}-CherryPy18, + python-framework_cherrypy-{py37}-CherryPy0302, + python-framework_cherrypy-pypy37-CherryPy0303, python-framework_django-{pypy,py27}-Django0103, python-framework_django-{pypy,py27,py37}-Django0108, python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest}, - python-framework_django-{py36,py37,py38,py39,py310}-Django0302, - python-framework_falcon-{py27,py36,py37,py38,py39,pypy,pypy36}-falcon0103, - 
python-framework_falcon-{py36,py37,py38,py39,py310,pypy36}-falcon{0200,master}, - python-framework_fastapi-{py36,py37,py38,py39,py310}, + python-framework_django-{py37,py38,py39,py310}-Django0302, + python-framework_falcon-{py27,py37,py38,py39,pypy,pypy37}-falcon0103, + python-framework_falcon-{py37,py38,py39,py310,pypy37}-falcon{0200,master}, + python-framework_fastapi-{py37,py38,py39,py310}, python-framework_flask-{pypy,py27}-flask0012, - python-framework_flask-{pypy,py27,py36,py37,py38,py39,py310,pypy36}-flask0101, + python-framework_flask-{pypy,py27,py37,py38,py39,py310,pypy37}-flask0101, ; temporarily disabling flaskmaster tests python-framework_flask-{py37,py38,py39,py310,pypy37}-flask{latest}, - python-framework_graphene-{py36,py37,py38,py39,py310}-graphenelatest, - python-framework_graphene-{py27,py36,py37,py38,py39,pypy,pypy36}-graphene{0200,0201}, + python-framework_graphene-{py37,py38,py39,py310}-graphenelatest, + python-framework_graphene-{py27,py37,py38,py39,pypy,pypy37}-graphene{0200,0201}, python-framework_graphene-py310-graphene0201, - python-framework_graphql-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-graphql02, - python-framework_graphql-{py36,py37,py38,py39,py310,pypy3}-graphql03, - python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302,master}, - grpc-framework_grpc-{py27,py36}-grpc0125, - grpc-framework_grpc-{py36,py37,py38,py39,py310}-grpclatest, + python-framework_graphql-{py27,py37,py38,py39,py310,pypy,pypy37}-graphql02, + python-framework_graphql-{py37,py38,py39,py310,pypy37}-graphql03, + ; temporarily disabling graphqlmaster tests + python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302}, + grpc-framework_grpc-py27-grpc0125, + grpc-framework_grpc-{py37,py38,py39,py310}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, - python-framework_pyramid-{pypy,py27,pypy36,py36,py37,py38,py39,py310}-Pyramid0110-cornice, - ;temporarily disabling pypy36 on pyramid master - python-framework_pyramid-{py37,py38,py39,py310}-Pyramidmaster, - python-framework_sanic-{py38,pypy36}-sanic{190301,1906,1812,1912,200904,210300}, - python-framework_sanic-{py36,py37,py38,py310,pypy36}-saniclatest, - python-framework_starlette-{py36,py310,pypy36}-starlette{0014,0015,0019}, - python-framework_starlette-{py36,py37,py38,py39,py310,pypy36}-starlettelatest, + python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310}-Pyramid0110-cornice, + python-framework_pyramid-{py37,py38,py39,py310,pypy37}-Pyramidmaster, + python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203,2290}, + python-framework_sanic-{py37,py38,py39,py310,pypy37}-saniclatest, + python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, + python-framework_starlette-{py37,py38}-starlette{002001}, + python-framework_starlette-{py37,py38,py39,py310,pypy37}-starlettelatest, python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest, - python-logger_logging-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - python-logger_loguru-{py36,py37,py38,py39,py310,pypy36}-logurulatest, + python-logger_logging-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-logger_loguru-{py37,py38,py39,py310,pypy37}-logurulatest, python-logger_loguru-py39-loguru{06,05,04,03}, - libcurl-framework_tornado-{py36,py37,py38,py39,py310,pypy36}-tornado0600, + libcurl-framework_tornado-{py37,py38,py39,py310,pypy37}-tornado0600, libcurl-framework_tornado-{py37,py38,py39,py310}-tornadomaster, - rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,pypy,pypy36}-pika0.13, - 
rabbitmq-messagebroker_pika-{py36,py37,py38,py39,py310,pypy36}-pikalatest, - python-template_mako-{py27,py36,py37,py38,py39,py310} + rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, + rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, + kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310}-confluentkafkalatest, + kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106}, + ; confluent-kafka had a bug in 1.8.2's setup.py file which was incompatible with 2.7. + kafka-messagebroker_confluentkafka-{py39}-confluentkafka{0108}, + kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest, + kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000,0104}, + python-template_mako-{py27,py37,py38,py39,py310} [pytest] usefixtures = @@ -158,7 +168,7 @@ usefixtures = [testenv] deps = # Base Dependencies - {py36,py37,py38,py39,py310,pypy36}: pytest==6.2.5 + {py37,py38,py39,py310,pypy37}: pytest==6.2.5 {py27,pypy}: pytest==4.6.11 iniconfig pytest-cov @@ -166,6 +176,9 @@ deps = # Test Suite Dependencies adapter_cheroot: cheroot + adapter_daphne-daphnelatest: daphne + adapter_daphne-daphne0205: daphne<2.6 + adapter_daphne-daphne0204: daphne<2.5 adapter_gevent: WSGIProxy2 adapter_gevent: gevent adapter_gevent: urllib3 @@ -173,11 +186,17 @@ deps = adapter_gunicorn-aiohttp3: aiohttp<4.0 adapter_gunicorn-gunicorn19: gunicorn<20 adapter_gunicorn-gunicornlatest: gunicorn + adapter_hypercorn-hypercornlatest: hypercorn + adapter_hypercorn-hypercorn0013: hypercorn<0.14 + adapter_hypercorn-hypercorn0012: hypercorn<0.13 + adapter_hypercorn-hypercorn0011: hypercorn<0.12 + adapter_hypercorn-hypercorn0010: hypercorn<0.11 adapter_uvicorn-uvicorn03: uvicorn<0.4 adapter_uvicorn-uvicorn014: uvicorn<0.15 adapter_uvicorn-uvicornlatest: uvicorn agent_features: beautifulsoup4 application_celery: celery<6.0 + application_celery-py{py37,37}: importlib-metadata<5.0 application_gearman: gearman<3.0.0 component_djangorestframework-djangorestframework0300: Django < 1.9 component_djangorestframework-djangorestframework0300: djangorestframework < 3.1 @@ -196,10 +215,10 @@ deps = component_graphqlserver: jinja2<3.1 component_tastypie-tastypie0143: django-tastypie<0.14.4 component_tastypie-{py27,pypy}-tastypie0143: django<1.12 - component_tastypie-{py36,py37,py38,py39,pypy36}-tastypie0143: django<3.0.1 + component_tastypie-{py37,py38,py39,py310,pypy37}-tastypie0143: django<3.0.1 component_tastypie-tastypielatest: django-tastypie - component_tastypie-tastypielatest: django - coroutines_asyncio: uvloop + component_tastypie-tastypielatest: django<4.1 + coroutines_asyncio-{py37,py38,py39,py310}: uvloop cross_agent: mock==1.0.1 cross_agent: requests datastore_asyncpg: asyncpg @@ -223,6 +242,7 @@ deps = datastore_pylibmc: pylibmc datastore_pymemcache: pymemcache datastore_pymongo-pymongo03: pymongo<4.0 + datastore_pymongo-pymongo04: pymongo<5.0 datastore_pymysql: PyMySQL<0.11 datastore_pysolr: pysolr<4.0 datastore_redis-redislatest: redis @@ -239,7 +259,7 @@ deps = external_boto3-boto01: moto<2.0 external_boto3-py27: rsa<4.7.1 external_botocore: botocore - external_botocore-{py36,py37,py38,py39,py310}: moto[awslambda,ec2,iam]<3.0 + external_botocore-{py37,py38,py39,py310}: moto[awslambda,ec2,iam]<3.0 external_botocore-py27: rsa<4.7.1 external_botocore-py27: moto[awslambda,ec2,iam]<2.0 external_feedparser-feedparser05: feedparser<6 @@ -250,7 +270,7 @@ deps = external_requests: requests external_urllib3-urllib30109: urllib3<1.10 
external_urllib3-urllib3latest: urllib3 - framework_aiohttp-aiohttp03: aiohttp<4 + framework_aiohttp-aiohttp03: aiohttp framework_ariadne-ariadnelatest: ariadne framework_ariadne-ariadne0011: ariadne<0.12 framework_ariadne-ariadne0012: ariadne<0.13 @@ -300,11 +320,15 @@ deps = framework_graphql-graphql0301: graphql-core<3.2 framework_graphql-graphql0302: graphql-core<3.3 framework_graphql-graphqlmaster: https://github.com/graphql-python/graphql-core/archive/main.zip - framework_grpc-grpc0125: grpcio<1.26 - framework_grpc-grpc0125: grpcio-tools<1.26 - framework_grpc-grpc0125: protobuf<3.18.0 + framework_grpc-grpclatest: protobuf framework_grpc-grpclatest: grpcio framework_grpc-grpclatest: grpcio-tools + grpc0125: grpcio<1.26 + grpc0125: grpcio-tools<1.26 + grpc0125: protobuf<3.18.0 + protobuf0319: protobuf<3.20 + protobuf03: protobuf<4 + protobuf04: protobuf<5 framework_pyramid: routes framework_pyramid-cornice: cornice!=5.0.0 framework_pyramid-Pyramid0104: Pyramid<1.5 @@ -316,16 +340,19 @@ deps = framework_sanic-sanic1912: sanic<19.13 framework_sanic-sanic200904: sanic<20.9.5 framework_sanic-sanic210300: sanic<21.3.1 - ; Temporarily test older sanic version until issues are resolved - framework_sanic-saniclatest: sanic<21.9.0 + framework_sanic-sanic2109: sanic<21.10 + framework_sanic-sanic2112: sanic<21.13 + framework_sanic-sanic2203: sanic<22.4 + framework_sanic-sanic2290: sanic<22.9.1 + framework_sanic-saniclatest: sanic framework_sanic-sanic{1812,190301,1906}: aiohttp framework_starlette: graphene<3 framework_starlette-starlette0014: starlette<0.15 framework_starlette-starlette0015: starlette<0.16 framework_starlette-starlette0019: starlette<0.20 + framework_starlette-starlette002001: starlette==0.20.1 framework_starlette-starlettelatest: starlette - ; Strawberry 0.95.0 is incompatible with Starlette 0.18.0, downgrade until future release - framework_strawberry: starlette<0.18.0 + framework_strawberry: starlette framework_strawberry-strawberrylatest: strawberry-graphql framework_tornado: pycurl framework_tornado-tornado0600: tornado<6.1 @@ -340,6 +367,14 @@ deps = messagebroker_pika-pikalatest: pika messagebroker_pika: tornado<5 messagebroker_pika-{py27,pypy}: enum34 + messagebroker_confluentkafka-confluentkafkalatest: confluent-kafka + messagebroker_confluentkafka-confluentkafka0108: confluent-kafka<1.9 + messagebroker_confluentkafka-confluentkafka0107: confluent-kafka<1.8 + messagebroker_confluentkafka-confluentkafka0106: confluent-kafka<1.7 + messagebroker_kafkapython-kafkapythonlatest: kafka-python + messagebroker_kafkapython-kafkapython020001: kafka-python<2.0.2 + messagebroker_kafkapython-kafkapython020000: kafka-python<2.0.1 + messagebroker_kafkapython-kafkapython0104: kafka-python<1.5 template_mako: mako<1.2 setenv = @@ -374,8 +409,10 @@ extras = changedir = adapter_cheroot: tests/adapter_cheroot + adapter_daphne: tests/adapter_daphne adapter_gevent: tests/adapter_gevent adapter_gunicorn: tests/adapter_gunicorn + adapter_hypercorn: tests/adapter_hypercorn adapter_uvicorn: tests/adapter_uvicorn agent_features: tests/agent_features agent_streaming: tests/agent_streaming @@ -436,4 +473,6 @@ changedir = logger_logging: tests/logger_logging logger_loguru: tests/logger_loguru messagebroker_pika: tests/messagebroker_pika + messagebroker_confluentkafka: tests/messagebroker_confluentkafka + messagebroker_kafkapython: tests/messagebroker_kafkapython template_mako: tests/template_mako
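For local runs, a single environment from the new Kafka matrix can be selected by its full name, e.g. tox -e kafka-messagebroker_kafkapython-py38-kafkapythonlatest (this assumes a broker reachable on the host/port pairs returned by kafka_settings() above).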