
Commit

Merge branch '1.x' into avara1986/APPSEC-8131-rcm-forks-error
avara1986 committed Feb 28, 2023
2 parents 70ff85c + fa5b3d9 commit f73fc22
Showing 168 changed files with 3,992 additions and 1,759 deletions.
26 changes: 19 additions & 7 deletions .circleci/config.yml
@@ -33,6 +33,10 @@ machine_executor: &machine_executor
command: |
pyenv global 3.9.4
contrib_job_large: &contrib_job_large
executor: ddtrace_dev
parallelism: 8

contrib_job: &contrib_job
executor: ddtrace_dev
parallelism: 4
@@ -570,7 +574,7 @@ jobs:
pattern: 'ddtracerun'

test_logging:
<<: *contrib_job_small
<<: *contrib_job
steps:
- run_test:
pattern: 'test_logging'
@@ -583,6 +587,7 @@

asyncpg:
<<: *machine_executor
parallelism: 6
steps:
- run_test:
pattern: 'asyncpg'
@@ -597,15 +602,15 @@

aiohttp:
<<: *machine_executor
parallelism: 6
parallelism: 10
steps:
- run_test:
pattern: 'aiohttp' # includes aiohttp_jinja2
snapshot: true
docker_services: 'httpbin_local'

asgi:
<<: *contrib_job_small
<<: *contrib_job
steps:
- run_test:
pattern: 'asgi$'
@@ -639,7 +644,7 @@ jobs:

celery:
<<: *contrib_job
parallelism: 7
parallelism: 13
docker:
- image: *ddtrace_dev_image
- image: redis:4.0-alpine
@@ -673,7 +678,7 @@

elasticsearch:
<<: *machine_executor
parallelism: 4
parallelism: 8
steps:
- run_test:
pattern: 'elasticsearch'
@@ -713,6 +718,7 @@

fastapi:
<<: *machine_executor
parallelism: 6
steps:
- run_test:
pattern: "fastapi"
@@ -746,6 +752,7 @@

graphql:
<<: *machine_executor
parallelism: 8
steps:
- run_test:
pattern: "graphql"
@@ -904,6 +911,7 @@

pyramid:
<<: *machine_executor
parallelism: 4
steps:
- run_test:
pattern: 'pyramid'
@@ -936,6 +944,7 @@

starlette:
<<: *machine_executor
parallelism: 4
steps:
- run_test:
pattern: "starlette"
@@ -962,7 +971,7 @@
docker_services: "postgres"

aiobotocore:
<<: *contrib_job
<<: *contrib_job_large
docker:
- image: *ddtrace_dev_image
- image: *moto_image
@@ -1027,6 +1036,7 @@

rediscluster:
<<: *machine_executor
parallelism: 4
steps:
- run_test:
pattern: 'rediscluster'
@@ -1096,7 +1106,7 @@
pattern: 'jinja2'

mako:
<<: *contrib_job_small
<<: *contrib_job
steps:
- run_test:
pattern: 'mako'
@@ -1160,10 +1170,12 @@ requires_tests: &requires_tests
- flask
- gevent
- graphql
- graphene
- grpc
- gunicorn
- httplib
- httpx
- internal
- integration_agent
- integration_testagent
- vendor
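The new `contrib_job_large` anchor above is consumed through YAML merge keys (`<<:`), as the `aiobotocore` job does further down this file. A quick, illustrative way to see how such a merge expands (assumes PyYAML; not part of the CI setup itself):

```python
# Sketch: how a CircleCI-style YAML anchor plus merge key expands when parsed.
# The config text below is a trimmed stand-in for .circleci/config.yml.
import yaml

doc = """
contrib_job_large: &contrib_job_large
  executor: ddtrace_dev
  parallelism: 8

jobs:
  aiobotocore:
    <<: *contrib_job_large
"""

print(yaml.safe_load(doc)["jobs"]["aiobotocore"])
# -> {'executor': 'ddtrace_dev', 'parallelism': 8}
```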
1 change: 1 addition & 0 deletions .circleci/dependencies.py
@@ -98,6 +98,7 @@
"opentracing": "2.4.0",
"packaging": "21.3",
"pastedeploy": "3.0.1",
"pep562": "1.1",
"psycopg2-binary": "2.9.5",
"py": "1.11.0",
"py-cpuinfo": "9.0.0",
2 changes: 1 addition & 1 deletion .github/workflows/docker.yml
@@ -34,7 +34,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build and push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: docker
file: docker/Dockerfile.buster
13 changes: 4 additions & 9 deletions .github/workflows/test_frameworks.yml
@@ -151,7 +151,7 @@ jobs:
- name: Run tests
run: ddtrace-run pytest graphene

fastapi-testsuite-0_75:
fastapi-testsuite-0_92:
runs-on: ubuntu-latest
env:
DD_TESTING_RAISE: true
@@ -169,21 +169,16 @@
- uses: actions/checkout@v3
with:
repository: tiangolo/fastapi
ref: 0.75.0
ref: 0.92.0
path: fastapi
- uses: actions/cache@v3.2.5
id: cache
with:
path: ${{ env.pythonLocation }}
key: ${{ runner.os }}-python-${{ env.pythonLocation }}-fastapi
#This step installs Flit, a way to put Python packages and modules on PyPI (More info at https://flit.readthedocs.io/en/latest/)
- name: Install Flit
if: steps.cache.outputs.cache-hit != 'true'
run: pip install flit
#Installs all dependencies needed for FastAPI
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: flit install --symlink
run: pip install -e .[all,dev,doc,test]
- name: Inject ddtrace
run: pip install ../ddtrace
- name: Test
@@ -454,7 +449,7 @@ jobs:
uwsgi-testsuite-2_0_21:
name: uwsgi 2.0.21
runs-on: "ubuntu-18.04"
runs-on: "ubuntu-latest"
env:
DD_TESTING_RAISE: true
DD_PROFILING_ENABLED: true
26 changes: 14 additions & 12 deletions .gitlab-ci.yml
@@ -71,16 +71,8 @@ deploy_to_docker_registries:
rules:
- if: '$POPULATE_CACHE'
when: never
# Wait 1 day to trigger the downstream job.
# This is a work-around since there isn't a way to trigger
# Gitlab from the Github workflow (build_deploy.yml:upload_pypi).
#
# The caveat here is that if there is a failure to build to PyPI
# and it isn't fixed in a day then this job will fail and images
# will not be published.
- if: '$CI_COMMIT_TAG =~ /^v.*/'
when: delayed
start_in: 1 day
when: on_success
- when: manual
allow_failure: true
trigger:
@@ -91,16 +83,23 @@
IMG_SOURCES: ghcr.io/datadog/dd-trace-py/dd-lib-python-init:$CI_COMMIT_TAG
IMG_DESTINATIONS: dd-lib-python-init:$CI_COMMIT_TAG
IMG_SIGNING: "false"
# Wait 4 hours to trigger the downstream job.
# This is a work-around since there isn't a way to trigger
# Gitlab from the Github workflow (build_deploy.yml:upload_pypi).
#
# The caveat here is that if there is a failure to build to PyPI and it
# isn't fixed in the retry period then this job will fail and images will
# not be published.
RETRY_DELAY: 14400
RETRY_COUNT: 3

deploy_latest_tag_to_docker_registries:
stage: deploy
rules:
- if: '$POPULATE_CACHE'
when: never
# See above note in the `deploy_to_docker_registries` job.
- if: '$CI_COMMIT_TAG =~ /^v.*/'
when: delayed
start_in: 1 day
when: on_success
- when: manual
allow_failure: true
trigger:
@@ -111,3 +110,6 @@ deploy_latest_tag_to_docker_registries:
IMG_SOURCES: ghcr.io/datadog/dd-trace-py/dd-lib-python-init:$CI_COMMIT_TAG
IMG_DESTINATIONS: dd-lib-python-init:latest
IMG_SIGNING: "false"
# See above note in the `deploy_to_docker_registries` job.
RETRY_DELAY: 14400
RETRY_COUNT: 3
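The replacement for the fixed one-day delay is a poll-and-retry scheme driven by `RETRY_DELAY` (14400 seconds, i.e. 4 hours) and `RETRY_COUNT` (3). A rough Python sketch of the semantics those two variables describe (the `package_is_on_pypi` check is hypothetical, not something the pipeline actually calls):

```python
# Sketch of the retry semantics behind RETRY_DELAY / RETRY_COUNT: poll a few times
# with a long pause instead of waiting a fixed day before a single attempt.
import time

RETRY_DELAY = 14400  # seconds between attempts (4 hours), mirroring the CI variable
RETRY_COUNT = 3      # total attempts, mirroring the CI variable


def package_is_on_pypi(tag):
    # Hypothetical availability check; the real downstream job does its own validation.
    return False


def wait_for_release(tag):
    for attempt in range(1, RETRY_COUNT + 1):
        if package_is_on_pypi(tag):
            return True
        if attempt < RETRY_COUNT:
            time.sleep(RETRY_DELAY)
    return False
```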
14 changes: 10 additions & 4 deletions README.md
@@ -72,6 +72,9 @@ launch them through:

#### Running Tests in docker

The dd-trace-py testrunner docker image allows you to run tests in an environment that matches CI. This is especially useful
if you are unable to install certain test dependencies on your dev machine's bare metal.

Once your docker-compose environment is running, you can use the shell script to
execute tests within a Docker image. You can start the container with a bash shell:

@@ -103,10 +106,13 @@ You can run multiple tests by using regular expressions:
`docker-compose up -d <SERVICE_NAME>`, where `<SERVICE_NAME>` should match a
service specified in the `docker-compose.yml` file.
5. Run a test suite: `riot -v run <RUN_FLAGS> <TEST_SUITE_NAME>`.
1. Optionally, use the `-s` and `-x` flags: `-s` prevents riot from
reinstalling the dev package; `-x` forces an exit after the first failed
test suite. To limit the tests to a particular version of Python, use the
`-p` flag: `riot -v run -p <PYTHON_VERSION>`.

You can use the `-s` and `-x` flags: `-s` prevents riot from reinstalling the dev package;
`-x` forces an exit after the first failed test suite. To limit the tests to a particular
version of Python, use the `-p` flag: `riot -v run -p <PYTHON_VERSION>`. You can also pass
command line arguments to the underlying test runner (like pytest) with the `--` argument.
For example, you can run a specific test under pytest with
`riot -v run -s gunicorn -- -k test_no_known_errors_occur`

The `run` command uses regex syntax, which in some cases will cause multiple
test suites to run. Use the following syntax to ensure only an individual suite
69 changes: 56 additions & 13 deletions ddtrace/appsec/_asm_request_context.py
@@ -1,11 +1,21 @@
import contextlib
from typing import Any
from typing import Generator
from typing import Optional
from typing import TYPE_CHECKING

from ddtrace import config
from ddtrace.internal.logger import get_logger


if TYPE_CHECKING:
from typing import Any
from typing import Callable
from typing import Generator
from typing import Optional

from ddtrace.vendor import contextvars


log = get_logger(__name__)

"""
Stopgap module for providing ASM context for the blocking features wrapping some
contextvars. When using this, note that context vars are always thread-local so each
@@ -21,13 +31,15 @@
_DD_EARLY_HEADERS_CASE_SENSITIVE_CONTEXTVAR = contextvars.ContextVar(
"datadog_early_headers_casesensitive_contextvar", default=False
)
_DD_EARLY_WAF_CALLBACK = contextvars.ContextVar("datadog_early_waf_callback", default=None)
_DD_BLOCK_REQUEST_CALLABLE = contextvars.ContextVar("datadog_block_request_callable_contextvar", default=None)
_DD_WAF_CALLBACK = contextvars.ContextVar("datadog_early_waf_callback", default=None)


def reset(): # type: () -> None
_DD_EARLY_IP_CONTEXTVAR.set(None)
_DD_EARLY_HEADERS_CONTEXTVAR.set(None)
_DD_EARLY_HEADERS_CASE_SENSITIVE_CONTEXTVAR.set(False)
_DD_BLOCK_REQUEST_CALLABLE.set(None)


def set_ip(ip): # type: (Optional[str]) -> None
@@ -59,25 +71,56 @@ def get_headers_case_sensitive(): # type: () -> bool
return _DD_EARLY_HEADERS_CASE_SENSITIVE_CONTEXTVAR.get()


def set_callback(callback): # type: (Any) -> None
_DD_EARLY_WAF_CALLBACK.set(callback)
def set_block_request_callable(_callable): # type: (Optional[Callable]) -> None
"""
Sets a callable that can be used to make a best-effort attempt to block the request. If
the callable needs any params, like headers, they should be curried with
functools.partial.
"""
if _callable:
_DD_BLOCK_REQUEST_CALLABLE.set(_callable)


def block_request(): # type: () -> None
"""
Calls the stored block request callable, if one has been set.
"""
_callable = _DD_BLOCK_REQUEST_CALLABLE.get()
if _callable:
_callable()

log.debug("Block request called but block callable not set by framework")


def set_waf_callback(callback): # type: (Any) -> None
_DD_WAF_CALLBACK.set(callback)


def call_callback(): # type: () -> Any
return _DD_EARLY_WAF_CALLBACK.get()()
def call_waf_callback(custom_data=None):
# type: (dict[str, Any] | None) -> None
if not config._appsec_enabled:
return
callback = _DD_WAF_CALLBACK.get()
if callback:
return callback(custom_data)
else:
log.warning("WAF callback called but not set")


def asm_request_context_set(remote_ip=None, headers=None, headers_case_sensitive=False):
# type: (Optional[str], Any, bool) -> None
def asm_request_context_set(remote_ip=None, headers=None, headers_case_sensitive=False, block_request_callable=None):
# type: (Optional[str], Any, bool, Optional[Callable]) -> None
set_ip(remote_ip)
set_headers(headers)
set_headers_case_sensitive(headers_case_sensitive)
set_block_request_callable(block_request_callable)


@contextlib.contextmanager
def asm_request_context_manager(remote_ip=None, headers=None, headers_case_sensitive=False):
# type: (Optional[str], Any, bool) -> Generator[None, None, None]
asm_request_context_set(remote_ip, headers, headers_case_sensitive)
def asm_request_context_manager(
remote_ip=None, headers=None, headers_case_sensitive=False, block_request_callable=None
):
# type: (Optional[str], Any, bool, Optional[Callable]) -> Generator[None, None, None]
asm_request_context_set(remote_ip, headers, headers_case_sensitive, block_request_callable)
try:
yield
finally:
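The new code above stores a framework-supplied blocking callable and a WAF callback in context variables so later code can trigger them without knowing which framework is running. A minimal, self-contained sketch of that pattern follows; the helper names (`render_403`, `handle_request`) are hypothetical, and only the ContextVar-plus-`functools.partial` shape mirrors the diff:

```python
# Sketch: store a curried blocking callable in a ContextVar, invoke it later.
import contextvars
import functools
from typing import Callable, Optional

_BLOCK_REQUEST_CALLABLE = contextvars.ContextVar(
    "block_request_callable", default=None
)  # type: contextvars.ContextVar[Optional[Callable[[], None]]]


def set_block_request_callable(_callable):
    # type: (Optional[Callable[[], None]]) -> None
    if _callable:
        _BLOCK_REQUEST_CALLABLE.set(_callable)


def block_request():
    # type: () -> None
    _callable = _BLOCK_REQUEST_CALLABLE.get()
    if _callable:
        _callable()


def render_403(status, headers):
    # Hypothetical framework hook that writes the blocked response.
    print("blocked with", status, headers)


def handle_request():
    # The framework integration curries any parameters the blocking hook needs...
    set_block_request_callable(
        functools.partial(render_403, 403, {"content-type": "application/json"})
    )
    # ...so code deeper in the stack (e.g. a WAF callback) can block the request
    # without knowing anything about the framework.
    block_request()


handle_request()  # prints: blocked with 403 {'content-type': 'application/json'}
```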
2 changes: 2 additions & 0 deletions ddtrace/appsec/_constants.py
@@ -80,6 +80,7 @@ class WAF_DATA_NAMES(object):
REQUEST_PATH_PARAMS = "server.request.path_params"
REQUEST_COOKIES = "server.request.cookies"
REQUEST_HTTP_IP = "http.client_ip"
REQUEST_USER_ID = "usr.id"
RESPONSE_STATUS = "server.response.status"
RESPONSE_HEADERS_NO_COOKIES = "server.response.headers.no_cookies"

@@ -97,6 +98,7 @@ class SPAN_DATA_NAMES(object):
REQUEST_PATH_PARAMS = "http.request.path_params"
REQUEST_COOKIES = "http.request.cookies"
REQUEST_HTTP_IP = "http.request.remote_ip"
REQUEST_USER_ID = "usr.id"
RESPONSE_STATUS = "http.response.status"
RESPONSE_HEADERS_NO_COOKIES = "http.response.headers"

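Both registries gain a `REQUEST_USER_ID` entry, keeping the WAF-side and span-side name tables aligned attribute for attribute. One plausible way such parallel registries get used is translating locally stored request data into the addresses a WAF expects; the classes and loop below are simplified stand-ins, not ddtrace's actual implementation:

```python
# Sketch: join two parallel name registries to re-key request data for a WAF.
class WAF_DATA_NAMES(object):
    REQUEST_HTTP_IP = "http.client_ip"
    REQUEST_USER_ID = "usr.id"


class SPAN_DATA_NAMES(object):
    REQUEST_HTTP_IP = "http.request.remote_ip"
    REQUEST_USER_ID = "usr.id"


def to_waf_addresses(stored):
    # type: (dict) -> dict
    """Map data keyed by span-side names onto the corresponding WAF address names."""
    out = {}
    for attr in ("REQUEST_HTTP_IP", "REQUEST_USER_ID"):
        span_key = getattr(SPAN_DATA_NAMES, attr)
        if span_key in stored:
            out[getattr(WAF_DATA_NAMES, attr)] = stored[span_key]
    return out


print(to_waf_addresses({"http.request.remote_ip": "203.0.113.7", "usr.id": "user-42"}))
# -> {'http.client_ip': '203.0.113.7', 'usr.id': 'user-42'}
```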