10 changes: 10 additions & 0 deletions docs/scenarios/docker_fixtures.md
@@ -0,0 +1,10 @@
A `DockerFixturesScenario` is a scenario category in which each test uses a dedicated Test Agent container and a dedicated Test Library container.

The name comes from pytest fixtures, the mechanism that allows a test method to get information about its test context:


```python
@scenarios.parametric
def test_stuff(test_library):
    assert test_library.is_running()  # test_library is a client API for the tested library container
```
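In the parametric scenario, the dedicated Test Agent container is exposed through the same mechanism. As a sketch (the `test_agent` fixture name is assumed here; the actual wiring lives in `tests/parametric/conftest.py`):

```python
@scenarios.parametric
def test_other_stuff(test_agent, test_library):
    # test_agent wraps this test's dedicated ddapm-test-agent container,
    # test_library wraps this test's dedicated library container
    assert test_library.is_running()
```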
6 changes: 3 additions & 3 deletions tests/parametric/conftest.py
@@ -14,7 +14,7 @@
from utils import context, scenarios, logger

from utils._context._scenarios.parametric import APMLibraryTestServer
from utils.docker_fixtures._test_agent import TestAgentAPI as _TestAgentAPI
from utils.docker_fixtures import TestAgentAPI as _TestAgentAPI, get_host_port, docker_run

# Max timeout in seconds to keep a container running
default_subprocess_run_timeout = 300
@@ -159,9 +159,9 @@ def test_library(
elif k in env:
del env[k]

apm_test_server.host_port = scenarios.parametric.get_host_port(worker_id, 4500)
apm_test_server.host_port = get_host_port(worker_id, 4500)

with scenarios.parametric.docker_run(
with docker_run(
image=apm_test_server.container_tag,
name=apm_test_server.container_name,
command=apm_test_server.container_cmd,
74 changes: 74 additions & 0 deletions utils/_context/_scenarios/_docker_fixtures.py
@@ -0,0 +1,74 @@
import contextlib
from collections.abc import Generator

import pytest

from utils.docker_fixtures._test_agent import TestAgentFactory, TestAgentAPI
from utils._context.docker import get_docker_client
from utils._logger import logger
from .core import Scenario


_NETWORK_PREFIX = "apm_shared_tests_network"


class DockerFixturesScenario(Scenario):
_test_agent_factory: TestAgentFactory  # must be created in DockerFixturesScenario.configure

def _clean(self):
if self.is_main_worker:
self._clean_containers()
self._clean_networks()

def _clean_containers(self):
"""Some containers may still exists from previous unfinished sessions"""

for container in get_docker_client().containers.list(all=True):
if "test-client" in container.name or "test-agent" in container.name or "test-library" in container.name:
logger.info(f"Removing {container}")

container.remove(force=True)

def _clean_networks(self):
"""Some network may still exists from previous unfinished sessions"""
logger.info("Removing unused network")
get_docker_client().networks.prune()
logger.info("Removing unused network done")

@contextlib.contextmanager
def _get_docker_network(self, test_id: str) -> Generator[str, None, None]:
docker_network_name = f"{_NETWORK_PREFIX}_{test_id}"
network = get_docker_client().networks.create(name=docker_network_name, driver="bridge")

try:
yield network.name
finally:
try:
network.remove()
except:
# It's possible (for reasons not yet understood) that some containers are not stopped yet.
# If that happens, failing here makes stdout hard to read.
# Ignore the error: later cleanup calls will remove whatever is left.
logger.info("Failed to remove network, ignoring the error")

@contextlib.contextmanager
def get_test_agent_api(
self,
worker_id: str,
request: pytest.FixtureRequest,
test_id: str,
container_otlp_http_port: int,
container_otlp_grpc_port: int,
) -> Generator[TestAgentAPI, None, None]:
with (
self._get_docker_network(test_id) as docker_network,
self._test_agent_factory.get_test_agent_api(
request=request,
worker_id=worker_id,
docker_network=docker_network,
container_name=f"ddapm-test-agent-{test_id}",
container_otlp_http_port=container_otlp_http_port,
container_otlp_grpc_port=container_otlp_grpc_port,
) as result,
):
yield result
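`get_test_agent_api` is meant to be consumed from a pytest fixture. A minimal sketch of such a fixture (the fixture names and OTLP port values are assumptions, not taken from this diff; the real wiring is in `tests/parametric/conftest.py`):

```python
import pytest

from utils import scenarios


@pytest.fixture
def test_agent(request: pytest.FixtureRequest, worker_id: str, test_id: str):
    # Each test gets its own docker network and its own ddapm-test-agent container;
    # 4318/4317 are assumed here as the usual OTLP HTTP/gRPC container ports.
    with scenarios.parametric.get_test_agent_api(
        worker_id=worker_id,
        request=request,
        test_id=test_id,
        container_otlp_http_port=4318,
        container_otlp_grpc_port=4317,
    ) as test_agent_api:
        yield test_agent_api
```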
149 changes: 7 additions & 142 deletions utils/_context/_scenarios/parametric.py
@@ -1,7 +1,5 @@
import contextlib
import dataclasses
from typing import TextIO, Any
from collections.abc import Generator
from typing import Any

import json
import glob
@@ -11,26 +9,18 @@
import subprocess

import pytest
from _pytest.outcomes import Failed
from docker.models.containers import Container

from utils.docker_fixtures._test_agent import TestAgentFactory, TestAgentAPI
from utils.docker_fixtures import TestAgentFactory, compute_volumes
from utils._context.component_version import ComponentVersion
from utils._context.docker import get_docker_client
from utils._logger import logger
from .core import Scenario, scenario_groups


def _fail(message: str):
"""Used to mak a test as failed"""
logger.error(message)
raise Failed(message, pytrace=False) from None
from .core import scenario_groups
from ._docker_fixtures import DockerFixturesScenario


# Max timeout in seconds to keep a container running
default_subprocess_run_timeout = 300
_NETWORK_PREFIX = "apm_shared_tests_network"
# _TEST_CLIENT_PREFIX = "apm_shared_tests_container"


@dataclasses.dataclass
@@ -53,11 +43,9 @@ class APMLibraryTestServer:
container: Container | None = None


class ParametricScenario(Scenario):
class ParametricScenario(DockerFixturesScenario):
apm_test_server_definition: APMLibraryTestServer

_test_agent_factory: TestAgentFactory

class PersistentParametricTestConf(dict):
"""Parametric tests are executed in multiple thread, we need a mechanism to persist
each parametrized_tests_metadata on a file
@@ -130,16 +118,15 @@ def configure(self, config: pytest.Config):
# we are in the main worker, not in a xdist sub-worker
self._build_apm_test_server_image(config.option.github_token_file)
self._test_agent_factory.pull()
self._clean_containers()
self._clean_networks()
self._clean()

# https://github.com/DataDog/system-tests/issues/2799
if library in ("nodejs", "python", "golang", "ruby", "dotnet", "rust"):
output = get_docker_client().containers.run(
self.apm_test_server_definition.container_tag,
remove=True,
command=["./system_tests_library_version.sh"],
volumes=self.compute_volumes(self.apm_test_server_definition.volumes),
volumes=compute_volumes(self.apm_test_server_definition.volumes),
)
else:
output = get_docker_client().containers.run(
@@ -161,21 +148,6 @@ def get_warmups(self):

return result

def _clean_containers(self):
"""Some containers may still exists from previous unfinished sessions"""

for container in get_docker_client().containers.list(all=True):
if "test-client" in container.name or "test-agent" in container.name or "test-library" in container.name:
logger.info(f"Removing {container}")

container.remove(force=True)

def _clean_networks(self):
"""Some network may still exists from previous unfinished sessions"""
logger.info("Removing unused network")
get_docker_client().networks.prune()
logger.info("Removing unused network done")

@property
def library(self):
return self._library
@@ -253,91 +225,6 @@ def _build_apm_test_server_image(self, github_token_file: str) -> None:

logger.debug("Build tested container finished")

@contextlib.contextmanager
def _get_docker_network(self, test_id: str) -> Generator[str, None, None]:
docker_network_name = f"{_NETWORK_PREFIX}_{test_id}"
network = get_docker_client().networks.create(name=docker_network_name, driver="bridge")

try:
yield network.name
finally:
try:
network.remove()
except:
# It's possible (why?) of having some container not stopped.
# If it happens, failing here makes stdout tough to understand.
# Let's ignore this, later calls will clean the mess
logger.info("Failed to remove network, ignoring the error")

@staticmethod
def get_host_port(worker_id: str, base_port: int) -> int:
"""Deterministic port allocation for each worker"""

if worker_id == "master": # xdist disabled
return base_port

if worker_id.startswith("gw"):
return base_port + int(worker_id[2:])

raise ValueError(f"Unexpected worker_id: {worker_id}")

@staticmethod
def compute_volumes(volumes: dict[str, str]) -> dict[str, dict]:
"""Convert volumes to the format expected by the docker-py API"""
fixed_volumes: dict[str, dict] = {}
for key, value in volumes.items():
if isinstance(value, dict):
fixed_volumes[key] = value
elif isinstance(value, str):
fixed_volumes[key] = {"bind": value, "mode": "rw"}
else:
raise TypeError(f"Unexpected type for volume {key}: {type(value)}")

return fixed_volumes

@contextlib.contextmanager
def docker_run(
self,
image: str,
name: str,
env: dict[str, str],
volumes: dict[str, str],
network: str,
ports: dict[str, int],
command: list[str],
log_file: TextIO,
) -> Generator[Container, None, None]:
logger.info(f"Run container {name} from image {image} with ports {ports}")

try:
container: Container = get_docker_client().containers.run(
image,
name=name,
environment=env,
volumes=self.compute_volumes(volumes),
network=network,
ports=ports,
command=command,
detach=True,
)
logger.debug(f"Container {name} successfully started")
except Exception as e:
# at this point, even if it failed to start, the container may exists!
for container in get_docker_client().containers.list(filters={"name": name}, all=True):
container.remove(force=True)

_fail(f"Failed to run container {name}: {e}")

try:
yield container
finally:
logger.info(f"Stopping {name}")
container.stop(timeout=1)
logs = container.logs()
log_file.write(logs.decode("utf-8"))
log_file.flush()
container.remove(force=True)

def get_junit_properties(self) -> dict[str, str]:
result = super().get_junit_properties()

@@ -347,28 +234,6 @@ def get_junit_properties(self) -> dict[str, str]:

return result

@contextlib.contextmanager
def get_test_agent_api(
self,
worker_id: str,
request: pytest.FixtureRequest,
test_id: str,
container_otlp_http_port: int,
container_otlp_grpc_port: int,
) -> Generator[TestAgentAPI, None, None]:
with (
self._get_docker_network(test_id) as docker_network,
self._test_agent_factory.get_test_agent_api(
request=request,
worker_id=worker_id,
docker_network=docker_network,
container_name=f"ddapm-test-agent-{test_id}",
container_otlp_http_port=container_otlp_http_port,
container_otlp_grpc_port=container_otlp_grpc_port,
) as result,
):
yield result


def _get_base_directory() -> str:
return str(Path.cwd())
10 changes: 10 additions & 0 deletions utils/docker_fixtures/__init__.py
@@ -0,0 +1,10 @@
from ._core import get_host_port, docker_run, compute_volumes
from ._test_agent import TestAgentAPI, TestAgentFactory

__all__ = [
"TestAgentAPI",
"TestAgentFactory",
"compute_volumes",
"docker_run",
"get_host_port",
]
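These are the helpers that callers such as `tests/parametric/conftest.py` now import directly from `utils.docker_fixtures`. A rough usage sketch (the image name, ports, paths and env values are placeholders, not values from this diff):

```python
from utils.docker_fixtures import docker_run, get_host_port

# Deterministic per-xdist-worker port: "gw3" -> 4500 + 3
host_port = get_host_port("gw3", 4500)

with open("test-library.log", "w") as log_file:
    # docker_run starts the container detached and yields it; on exit it stops
    # the container, writes its logs to log_file and force-removes it.
    with docker_run(
        image="example/test-library:latest",
        name="test-library-gw3",
        env={"DD_TRACE_DEBUG": "true"},
        volumes={},
        network="bridge",
        ports={"8080/tcp": host_port},
        command=[],
        log_file=log_file,
    ) as container:
        print(container.name, container.status)
```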
4 changes: 2 additions & 2 deletions utils/docker_fixtures/_core.py
@@ -22,7 +22,7 @@ def get_host_port(worker_id: str, base_port: int) -> int:
raise ValueError(f"Unexpected worker_id: {worker_id}")


def _compute_volumes(volumes: dict[str, str]) -> dict[str, dict]:
def compute_volumes(volumes: dict[str, str]) -> dict[str, dict]:
"""Convert volumes to the format expected by the docker-py API"""
fixed_volumes: dict[str, dict] = {}
for key, value in volumes.items():
@@ -57,7 +57,7 @@ def docker_run(
image,
name=name,
environment=env,
volumes=_compute_volumes(volumes),
volumes=compute_volumes(volumes),
network=network,
ports=ports,
command=command,
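`compute_volumes` (now public) converts plain string bind targets into the mapping form docker-py expects, and passes explicit dicts through unchanged; for example (paths are illustrative):

```python
from utils.docker_fixtures import compute_volumes

volumes = compute_volumes({
    "./logs": "/var/log/system-tests",  # str -> bind mount with mode "rw"
    "/var/run/docker.sock": {"bind": "/var/run/docker.sock", "mode": "ro"},  # dict passed through
})
assert volumes["./logs"] == {"bind": "/var/log/system-tests", "mode": "rw"}
```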
2 changes: 2 additions & 0 deletions utils/scripts/libraries_and_scenarios_rules.yml
@@ -187,6 +187,8 @@ patterns:
scenario_groups: onboarding
utils/_context/_scenarios/default.py:
scenario_groups: default
utils/_context/_scenarios/_docker_fixtures.py:
scenario_groups: parametric
utils/_context/_scenarios/appsec_rasp.py:
scenario_groups: appsec_rasp_scenario
utils/_context/_scenarios/endtoend.py: