diff --git a/.github/workflows/compute-impacted-libraries.yml b/.github/workflows/compute-impacted-libraries.yml index 6b6d4d4797a..481f2fdab5d 100644 --- a/.github/workflows/compute-impacted-libraries.yml +++ b/.github/workflows/compute-impacted-libraries.yml @@ -43,12 +43,12 @@ jobs: # temporary print to see what's hapenning on differents events print(json.dumps(github_context, indent=2)) - libraries = "cpp|cpp_httpd|cpp_nginx|dotnet|golang|java|nodejs|php|python|ruby|java_otel|python_otel|nodejs_otel" + libraries = "cpp|cpp_httpd|cpp_nginx|dotnet|golang|java|nodejs|php|python|ruby|java_otel|python_otel|nodejs_otel|python_lambda" result = set() # do not include otel in system-tests CI by default, as the staging backend is not stable enough # all_libraries = {"cpp", "dotnet", "golang", "java", "nodejs", "php", "python", "ruby", "java_otel", "python_otel", "nodejs_otel"} - all_libraries = {"cpp", "cpp_httpd", "cpp_nginx", "dotnet", "golang", "java", "nodejs", "php", "python", "ruby"} + all_libraries = {"cpp", "cpp_httpd", "cpp_nginx", "dotnet", "golang", "java", "nodejs", "php", "python", "ruby", "python_lambda"} if github_context["ref"] == "refs/heads/main": print("Merge commit to main => run all libraries") diff --git a/.github/workflows/compute-workflow-parameters.yml b/.github/workflows/compute-workflow-parameters.yml index f3a787cc966..cae59d1143e 100644 --- a/.github/workflows/compute-workflow-parameters.yml +++ b/.github/workflows/compute-workflow-parameters.yml @@ -175,7 +175,7 @@ jobs: with: name: binaries_dev_${{ inputs.library }} path: binaries/ - include-hidden-files: ${{ inputs.library == 'python' }} + include-hidden-files: ${{ inputs.library == 'python' || inputs.library == 'python_lambda' }} - name: Set unique ID id: unique_id run: echo "value=$(openssl rand -hex 8)" >> $GITHUB_OUTPUT diff --git a/.github/workflows/run-end-to-end.yml b/.github/workflows/run-end-to-end.yml index 5913efc8f2f..bc69d19de54 100644 --- 
a/.github/workflows/run-end-to-end.yml +++ b/.github/workflows/run-end-to-end.yml @@ -140,6 +140,9 @@ jobs: - name: Build weblog id: build run: SYSTEM_TEST_BUILD_ATTEMPTS=3 ./build.sh ${{ inputs.library }} -i weblog -w ${{ inputs.weblog }} + - name: Build Lambda Proxy + if: ${{ endsWith(inputs.library, '_lambda') }} + run: ./build.sh python_lambda -i lambda-proxy - name: Run APPSEC_STANDALONE scenario if: always() && steps.build.outcome == 'success' && contains(inputs.scenarios, '"APPSEC_STANDALONE"') @@ -420,6 +423,9 @@ jobs: DD_APP_KEY_2: ${{ secrets.DD_APP_KEY_2 }} DD_API_KEY_3: ${{ secrets.DD_API_KEY_3 }} DD_APP_KEY_3: ${{ secrets.DD_APP_KEY_3 }} + - name: Run APPSEC_LAMBDA_DEFAULT scenario + if: always() && steps.build.outcome == 'success' && contains(inputs.scenarios, '"APPSEC_LAMBDA_DEFAULT"') + run: ./run.sh APPSEC_LAMBDA_DEFAULT - name: Run all scenarios in replay mode if: success() && steps.build.outcome == 'success' && inputs.enable_replay_scenarios run: utils/scripts/replay_scenarios.sh diff --git a/docs/scenarios/aws_lambda.md b/docs/scenarios/aws_lambda.md new file mode 100644 index 00000000000..a70244d4632 --- /dev/null +++ b/docs/scenarios/aws_lambda.md @@ -0,0 +1,63 @@ +# Lambda Testing scenario + +The Lambda scenario is a variation on the [classical architecture](../architecture/overview.md#what-are-the-components-of-a-running-test) of the system-tests tailored to evaluate the `AWS Lambda` variants of the tracers when used to serve HTTP requests. 
+ +To achieve this we simulate the following AWS deployment architecture inside the system-tests using AWS provided tools: + +```mermaid +graph LR + A[Incoming HTTP Request] -->|HTTP| B[AWS Managed Load Balancer] + B -->|event: request as JSON| C[AWS Lambda] +``` + +The AWS Managed Load Balancer could be any of the following ones: +- API Gateway +- Application Load Balancer +- Lambda function url service + +To do this, we rely on two tools from AWS to emulate Lambda and Load Balancers: +- [AWS Lambda Runtime Interface Emulator](https://github.com/aws/aws-lambda-runtime-interface-emulator) +- [AWS SAM cli](https://github.com/aws/aws-sam-cli) + +>Note: for now only the python variant ([`datadog_lambda`](https://github.com/DataDog/datadog-lambda-python)) is being tested simulating an `API Gateway` + +## Key differences with end to end scenarios + +To replace the **AWS Managed Load Balancer**, we run a dedicated container in front of the weblog named **Lambda Proxy**. It is responsible for converting the incoming request to a *lambda event* representation, invoking the lambda function running inside the weblog and converting back the return value of the function to an HTTP response. + +The **Lambda Function** runs inside the **Weblog Container** thanks to the *AWS Lambda Runtime Interface Emulator*. + + +There is no **Agent Container**, the **Datadog Extension** (equivalent to the **Datadog Agent** in the context of lambda) needs to run inside the **Weblog Container**, the [**Application Proxy Container**](../architecture/overview.md#application-proxy-container) therefore needs to send traces back to the **Weblog Container**. 
+ + +```mermaid +flowchart TD + TESTS[Tests Container] -->|Send Requests| LambdaProxy + LambdaProxy[Lambda Proxy] -->|Send Lambda Event| Application + subgraph APP[Application Container] + socat[socat *:8127] --> Extension + Extension[Extension localhost:8126] + Application[Application *:8080] + end + Application --> | Send Traces | APPPROXY + APPPROXY[Application Proxy] --> | Send back traces | socat + APPPROXY -->|mitmdump| TESTS + Extension --> AGENTPROXY + AGENTPROXY[Agent Proxy] -->|remote request| BACKEND + AGENTPROXY -->|mitmdump| TESTS + BACKEND[Datadog] -->|trace API| TESTS +``` + +## Specific considerations for the weblogs + +On top of responding to the regular [`/healthcheck`](../weblog/README.md#get-healthcheck) endpoint, Lambda weblogs must also handle a plain (non-HTTP) healthcheck event. + +Lambda Weblogs should respond with the same JSON dict response to this non-HTTP event: +```json +{ + "healthcheck": true +} +``` + +This is because the healthcheck is sent by the Lambda Weblog container itself which has no knowledge of how to serialize it as the event type expected by the weblog. 
\ No newline at end of file diff --git a/manifests/python_lambda.yml b/manifests/python_lambda.yml new file mode 100644 index 00000000000..5568b6105a4 --- /dev/null +++ b/manifests/python_lambda.yml @@ -0,0 +1,21 @@ +--- +tests/: + appsec/: + test_alpha.py: + Test_Basic: 7.112.0 + test_only_python.py: + Test_ImportError: 7.112.0 + test_reports.py: + Test_ExtraTagsFromRule: 7.112.0 + Test_Info: 7.112.0 + Test_RequestHeaders: 7.112.0 + Test_StatusCode: 7.112.0 + test_traces.py: + Test_AppSecEventSpanTags: 7.112.0 + Test_AppSecObfuscator: 7.112.0 + Test_CollectDefaultRequestHeader: 7.112.0 + Test_CollectRespondHeaders: 7.112.0 + Test_ExternalWafRequestsIdentification: 7.112.0 + Test_RetainTraces: 7.112.0 + test_versions.py: + Test_Events: 7.112.0 diff --git a/tests/appsec/api_security/test_schemas.py b/tests/appsec/api_security/test_schemas.py index a1d31ba8399..8905165a1fa 100644 --- a/tests/appsec/api_security/test_schemas.py +++ b/tests/appsec/api_security/test_schemas.py @@ -43,6 +43,7 @@ def equal_value(t1, t2): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Request_Headers: """Test API Security - Request Headers Schema""" @@ -63,6 +64,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Request_Cookies: """Test API Security - Request Cookies Schema""" @@ -87,6 +89,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Request_Query_Parameters: """Test 
API Security - Request Query Parameters Schema""" @@ -107,6 +110,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Request_Path_Parameters: """Test API Security - Request Path Parameters Schema""" @@ -128,6 +132,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Request_Json_Body: """Test API Security - Request Body and list length""" @@ -148,6 +153,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Request_FormUrlEncoded_Body: """Test API Security - Request Body and list length""" @@ -188,6 +194,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Response_Headers: """Test API Security - Response Header Schema""" @@ -207,6 +214,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Response_Body: """Test API Security - Response Body Schema with urlencoded body""" @@ -233,6 +241,7 @@ def test_request_method(self): 
@rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Schema_Response_on_Block: """Test API Security - Response Schemas with urlencoded body @@ -293,6 +302,7 @@ def test_request_method(self): @rfc("https://docs.google.com/document/d/1OCHPBCAErOL2FhLl64YAHB8woDyq66y5t-JGolxdf1Q/edit#heading=h.bth088vsbjrz") @scenarios.appsec_api_security +@scenarios.appsec_lambda_api_security @features.api_security_schemas class Test_Scanners: """Test API Security - Scanners""" diff --git a/tests/appsec/test_alpha.py b/tests/appsec/test_alpha.py index 41db5ebb091..c2fcc6a00f5 100644 --- a/tests/appsec/test_alpha.py +++ b/tests/appsec/test_alpha.py @@ -9,6 +9,7 @@ @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_Basic: """Detect attacks on raw URI and headers with default rules""" diff --git a/tests/appsec/test_only_python.py b/tests/appsec/test_only_python.py index ce6cc45e51c..c840741def9 100644 --- a/tests/appsec/test_only_python.py +++ b/tests/appsec/test_only_python.py @@ -10,13 +10,14 @@ @scenarios.appsec_runtime_activation @scenarios.appsec_standalone @scenarios.default +@scenarios.appsec_lambda_default @features.language_specifics -@irrelevant(context.library != "python", reason="specific tests for python tracer") +@irrelevant(context.library not in ("python", "python_lambda"), reason="specific tests for python tracer") class Test_ImportError: """Tests to verify that we don't have import errors due to tracer instrumentation.""" @flaky(context.library == "python@3.2.1" and "flask" in context.weblog_variant, reason="APMRP-360") def test_circular_import(self): """Test to verify that we don't have a circular import in the weblog.""" - assert context.library == "python" + assert context.library in ("python", "python_lambda") 
interfaces.library_stdout.assert_absence("most likely due to a circular import") diff --git a/tests/appsec/test_reports.py b/tests/appsec/test_reports.py index 239d261c229..613569dbcec 100644 --- a/tests/appsec/test_reports.py +++ b/tests/appsec/test_reports.py @@ -68,6 +68,7 @@ def _check_service(span, appsec_data): # noqa: ARG001 @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_RequestHeaders: """Request Headers for IP resolution""" @@ -107,6 +108,7 @@ def test_http_request_headers(self): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_TagsFromRule: """Tags tags from the rule""" @@ -135,6 +137,7 @@ def test_category(self): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_ExtraTagsFromRule: """Extra tags may be added to the rule match since libddwaf 1.10.0""" @@ -164,6 +167,7 @@ def _get_appsec_triggers(request): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_AttackTimestamp: """Attack timestamp""" diff --git a/tests/appsec/test_traces.py b/tests/appsec/test_traces.py index 8a4ff04a050..097ba8e59f5 100644 --- a/tests/appsec/test_traces.py +++ b/tests/appsec/test_traces.py @@ -16,6 +16,7 @@ @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_RetainTraces: """Retain trace (manual keep & appsec.event = true)""" @@ -59,6 +60,7 @@ def validate_appsec_event_span_tags(span): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_AppSecEventSpanTags: """AppSec correctly fill span tags.""" @@ -71,9 +73,15 @@ def test_custom_span_tags(self): spans = [span for _, span in 
interfaces.library.get_root_spans()] assert spans, "No root spans to validate" - spans = [s for s in spans if s.get("type") == "web"] - assert spans, "No spans of type web to validate" + spans = [s for s in spans if s.get("type") in ("web", "serverless")] + assert spans, "No spans of type web or serverless to validate" for span in spans: + if span.get("type") == "serverless" and "_dd.appsec.unsupported_event_type" in span["metrics"]: + # For serverless, the `healthcheck` event is not supported + assert ( + span["metrics"]["_dd.appsec.unsupported_event_type"] == 1 + ), "_dd.appsec.unsupported_event_type should be 1 or 1.0" + continue assert "_dd.appsec.enabled" in span["metrics"], "Cannot find _dd.appsec.enabled in span metrics" assert span["metrics"]["_dd.appsec.enabled"] == 1, "_dd.appsec.enabled should be 1 or 1.0" assert "_dd.runtime_family" in span["meta"], "Cannot find _dd.runtime_family in span meta" @@ -84,6 +92,7 @@ def test_custom_span_tags(self): def setup_header_collection(self): self.r = weblog.get("/headers", headers={"User-Agent": "Arachni/v1", "Content-Type": "text/plain"}) + @bug(library="python_lambda", reason="APPSEC-58202") @bug(context.library < f"python@{PYTHON_RELEASE_GA_1_1}", reason="APMRP-360") @bug(context.library < "java@1.2.0", weblog_variant="spring-boot-openliberty", reason="APPSEC-6734") @bug( @@ -91,7 +100,7 @@ def setup_header_collection(self): weblog_variant="fastify", reason="APPSEC-57432", # Response headers collection not supported yet ) - @irrelevant(context.library not in ["golang", "nodejs", "java", "dotnet"], reason="test") + @irrelevant(context.library not in ["golang", "nodejs", "java", "dotnet", "python_lambda"], reason="test") @irrelevant(context.scenario is scenarios.external_processing, reason="Irrelevant tag set for golang") def test_header_collection(self): """AppSec should collect some headers for http.request and http.response and store them in span tags. 
@@ -113,7 +122,7 @@ def test_header_collection(self): @bug(context.library < "java@0.93.0", reason="APMRP-360") def test_root_span_coherence(self): """Appsec tags are not on span where type is not web, http or rpc""" - valid_appsec_span_types = ["web", "http", "rpc"] + valid_appsec_span_types = ["web", "http", "rpc", "serverless"] spans = [span for _, _, span in interfaces.library.get_spans()] assert spans, "No spans to validate" assert any("_dd.appsec.enabled" in s.get("metrics", {}) for s in spans), "No appsec-enabled spans found" @@ -134,6 +143,7 @@ def test_root_span_coherence(self): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_AppSecObfuscator: """AppSec obfuscates sensitive data.""" @@ -285,6 +295,7 @@ def validate_appsec_span_tags(span, appsec_data): # noqa: ARG001 @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_CollectRespondHeaders: """AppSec should collect some headers for http.response and store them in span tags.""" @@ -295,6 +306,7 @@ def setup_header_collection(self): context.scenario is scenarios.external_processing, reason="The endpoint /headers is not implemented in the weblog", ) + @bug(library="python_lambda", reason="APPSEC-58202") def test_header_collection(self): def assert_header_in_span_meta(span, header): if header not in span["meta"]: @@ -313,6 +325,7 @@ def validate_response_headers(span): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_CollectDefaultRequestHeader: HEADERS = { "User-Agent": "MyBrowser", @@ -346,6 +359,7 @@ def test_collect_default_request_headers(self): @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_ExternalWafRequestsIdentification: def setup_external_wafs_header_collection(self): self.r = 
weblog.get( diff --git a/tests/appsec/test_versions.py b/tests/appsec/test_versions.py index 3d91c70c406..b752dff0e15 100644 --- a/tests/appsec/test_versions.py +++ b/tests/appsec/test_versions.py @@ -9,6 +9,7 @@ @features.envoy_external_processing @scenarios.external_processing @scenarios.default +@scenarios.appsec_lambda_default class Test_Events: """AppSec events uses events in span""" diff --git a/utils/_context/_scenarios/__init__.py b/utils/_context/_scenarios/__init__.py index c6a6006643d..0e60af99e91 100644 --- a/utils/_context/_scenarios/__init__.py +++ b/utils/_context/_scenarios/__init__.py @@ -4,6 +4,7 @@ from utils.proxy.ports import ProxyPorts from utils.tools import update_environ_with_local_env +from .aws_lambda import LambdaScenario from .core import Scenario, scenario_groups from .default import DefaultScenario from .endtoend import DockerScenario, EndToEndScenario @@ -187,6 +188,7 @@ class _Scenarios: doc="Misc tests for appsec blocking", scenario_groups=[scenario_groups.appsec, scenario_groups.essentials], ) + # This GraphQL scenario can be used for any GraphQL testing, not just AppSec graphql_appsec = EndToEndScenario( "GRAPHQL_APPSEC", @@ -1077,6 +1079,29 @@ class _Scenarios: doc="Test runtime metrics", ) + # Appsec Lambda Scenarios + appsec_lambda_default = LambdaScenario( + "APPSEC_LAMBDA_DEFAULT", + doc="Default Lambda scenario", + scenario_groups=[scenario_groups.appsec], + ) + appsec_lambda_api_security = LambdaScenario( + "APPSEC_LAMBDA_API_SECURITY", + weblog_env={ + "DD_API_SECURITY_ENABLED": "true", + "DD_API_SECURITY_REQUEST_SAMPLE_RATE": "1.0", + "DD_API_SECURITY_SAMPLE_DELAY": "0.0", + "DD_API_SECURITY_MAX_CONCURRENT_REQUESTS": "50", + "DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED": "true", + "DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT": "30", + }, + doc=""" + Scenario for API Security feature in lambda, testing schema types sent into span tags if + DD_API_SECURITY_ENABLED is set to true. 
+ """, + scenario_groups=[scenario_groups.appsec], + ) + scenarios = _Scenarios() diff --git a/utils/_context/_scenarios/aws_lambda.py b/utils/_context/_scenarios/aws_lambda.py new file mode 100644 index 00000000000..2451e072be0 --- /dev/null +++ b/utils/_context/_scenarios/aws_lambda.py @@ -0,0 +1,145 @@ +import pytest +from utils import interfaces +from utils._context._scenarios.core import ScenarioGroup +from utils._context.containers import LambdaProxyContainer, LambdaWeblogContainer +from utils._logger import logger +from .endtoend import DockerScenario, ProxyBasedInterfaceValidator +from .core import scenario_groups as all_scenario_groups + + +class LambdaScenario(DockerScenario): + """Scenario for end-to-end testing of AWS Lambda HTTP Instrumentation. + + The `LambdaScenario` sets up an environment with the following components: + - A LambdaWeblog container that runs the application using AWS Lambda RIE (Runtime Interface Emulator). + - A LambdaProxy container that converts between http requests and lambda events to invoke the function. + + In this scenario, there is no agent container, but the LambdaWeblog contains the `datadog-lambda-extension` + which is the agent in the context of Lambda. 
+ """ + + def __init__( + self, + name: str, + *, + github_workflow: str = "endtoend", + doc: str, + scenario_groups: list[ScenarioGroup] | None = None, + weblog_env: dict[str, str | None] | None = None, + weblog_volumes: dict[str, dict[str, str]] | None = None, + ): + scenario_groups = [ + all_scenario_groups.tracer_release, + ] + (scenario_groups or []) + + super().__init__(name, github_workflow=github_workflow, doc=doc, scenario_groups=scenario_groups) + + self.lambda_weblog = LambdaWeblogContainer( + host_log_folder=self.host_log_folder, + environment=weblog_env or {}, + volumes=weblog_volumes or {}, + ) + + self.lambda_proxy_container = LambdaProxyContainer( + host_log_folder=self.host_log_folder, + lambda_weblog_host=self.lambda_weblog.name, + lambda_weblog_port=str(self.lambda_weblog.container_port), + ) + + self.lambda_proxy_container.depends_on.append(self.lambda_weblog) + self.lambda_weblog.depends_on.append(self.proxy_container) + + self.proxy_container.environment.update( + { + "PROXY_TRACING_AGENT_TARGET_HOST": self.lambda_weblog.name, + "PROXY_TRACING_AGENT_TARGET_PORT": "8127", + } + ) + + self._required_containers.extend((self.lambda_weblog, self.lambda_proxy_container)) + + def configure(self, config: pytest.Config): + super().configure(config) + + interfaces.agent.configure(self.host_log_folder, replay=self.replay) + interfaces.library.configure(self.host_log_folder, replay=self.replay) + interfaces.backend.configure(self.host_log_folder, replay=self.replay) + interfaces.library_stdout.configure(self.host_log_folder, replay=self.replay) + + def _get_weblog_system_info(self): + try: + code, (stdout, stderr) = self.lambda_weblog.exec_run("uname -a", demux=True) + if code or stdout is None: + message = f"Failed to get weblog system info: [{code}] {stderr.decode()} {stdout.decode()}" + else: + message = stdout.decode() + except BaseException: + logger.exception("can't get weblog system info") + else: + logger.stdout(f"Weblog system: 
{message.strip()}") + + if self.lambda_weblog.environment.get("DD_TRACE_DEBUG") == "true": + logger.stdout("\t/!\\ Debug logs are activated in weblog") + + logger.stdout("") + + def _start_interfaces_watchdog(self): + return super().start_interfaces_watchdog([interfaces.library, interfaces.agent]) + + def _set_components(self): + self.components["library"] = self.library.version + + def _wait_for_app_readiness(self): + logger.debug("Wait for app readiness") + + if not interfaces.library.ready.wait(40): + raise ValueError("Library not ready") + + logger.debug("Library ready") + + def get_warmups(self): + warmups = super().get_warmups() + + if not self.replay: + warmups.insert(1, self._start_interfaces_watchdog) + warmups.append(self._get_weblog_system_info) + warmups.append(self._wait_for_app_readiness) + warmups.append(self._set_components) + + return warmups + + def _wait_interface(self, interface: ProxyBasedInterfaceValidator, timeout: int): + logger.terminal.write_sep("-", f"Wait for {interface.name} interface ({timeout}s)") + logger.terminal.flush() + + interface.wait(timeout) + + def _wait_and_stop_containers(self, *, force_interface_timeout_to_zero: bool = False): + if self.replay: + logger.terminal.write_sep("-", "Load all data from logs") + logger.terminal.flush() + + interfaces.library.load_data_from_logs() + interfaces.library.check_deserialization_errors() + + interfaces.backend.load_data_from_logs() + else: + self._wait_interface(interfaces.library, 0 if force_interface_timeout_to_zero else 5) + self._wait_interface(interfaces.agent, 0 if force_interface_timeout_to_zero else 5) + self.lambda_weblog.stop() + interfaces.library.check_deserialization_errors() + interfaces.agent.check_deserialization_errors() + + self._wait_interface(interfaces.backend, 0) + + def post_setup(self, session: pytest.Session): + is_empty_test_run = session.config.option.skip_empty_scenario and len(session.items) == 0 + + try: + 
self._wait_and_stop_containers(force_interface_timeout_to_zero=is_empty_test_run) + finally: + self.close_targets() + + @property + def library(self): + return self.lambda_weblog.library diff --git a/utils/_context/containers.py b/utils/_context/containers.py index dc2adb78cca..0a4c2e52643 100644 --- a/utils/_context/containers.py +++ b/utils/_context/containers.py @@ -605,6 +605,38 @@ def __init__( ) +class LambdaProxyContainer(TestedContainer): + def __init__( + self, + *, + host_log_folder: str, + lambda_weblog_host: str, + lambda_weblog_port: str, + ) -> None: + from utils import weblog + + self.host_port = weblog.port + self.container_port = "7777" + + super().__init__( + image_name="system_tests/lambda-proxy", + name="lambda-proxy", + host_log_folder=host_log_folder, + environment={ + "RIE_HOST": lambda_weblog_host, + "RIE_PORT": lambda_weblog_port, + }, + ports={ + f"{self.host_port}/tcp": self.container_port, + }, + healthcheck={ + "test": f"curl --fail --silent --show-error --max-time 2 localhost:{self.container_port}/healthcheck", + "retries": 60, + }, + local_image_only=True, + ) + + class AgentContainer(TestedContainer): apm_receiver_port: int = 8127 dogstatsd_port: int = 8125 @@ -1015,6 +1047,58 @@ def telemetry_heartbeat_interval(self): return 2 +class LambdaWeblogContainer(WeblogContainer): + def __init__( + self, + host_log_folder: str, + *, + environment: dict[str, str | None] | None = None, + volumes: dict | None = None, + ): + environment = (environment or {}) | { + "DD_HOSTNAME": "test", + "DD_SITE": os.environ.get("DD_SITE", "datad0g.com"), + "DD_API_KEY": os.environ.get("DD_API_KEY", _FAKE_DD_API_KEY), + "DD_SERVERLESS_FLUSH_STRATEGY": "periodically,100", + "DD_TRACE_MANAGED_SERVICES": "false", + } + + volumes = volumes or {} + + environment["DD_PROXY_HTTPS"] = f"http://proxy:{ProxyPorts.agent}" + environment["DD_LOG_LEVEL"] = "debug" + volumes.update( + { + "./utils/build/docker/agent/ca-certificates.crt": { + "bind": 
"/etc/ssl/certs/ca-certificates.crt", + "mode": "ro", + }, + "./utils/build/docker/agent/datadog.yaml": { + "bind": "/var/task/datadog.yaml", + "mode": "ro", + }, + } + ) + + super().__init__( + host_log_folder, + environment=environment, + volumes=volumes, + ) + + # Set the container port to the one used by the one of the Lambda RIE + self.container_port = 8080 + + # Replace healthcheck with a custom one for Lambda + healthcheck_event = json.dumps({"healthcheck": True}) + self.healthcheck = { + "test": f"curl --fail --silent --show-error --max-time 2 -XPOST -d '{healthcheck_event}' http://localhost:{self.container_port}/2015-03-31/functions/function/invocations", + "retries": 60, + } + # Remove port bindings, as only the LambdaProxyContainer needs to expose a server + self.ports = {} + + class PostgresContainer(SqlDbTestedContainer): def __init__(self, host_log_folder: str) -> None: super().__init__( diff --git a/utils/_decorators.py b/utils/_decorators.py index 029dc49caba..d0fd0c1c156 100644 --- a/utils/_decorators.py +++ b/utils/_decorators.py @@ -101,6 +101,7 @@ def _expected_to_fail(condition: bool | None = None, library: str | None = None, "java_otel", "python_otel", "nodejs_otel", + "python_lambda", ): raise ValueError(f"Unknown library: {library}") @@ -242,6 +243,7 @@ def released( agent: str | None = None, dd_apm_inject: str | None = None, k8s_cluster_agent: str | None = None, + python_lambda: str | None = None, ): """Class decorator, allow to mark a test class with a version number of a component""" @@ -296,6 +298,7 @@ def compute_declaration( compute_declaration("php", "php", php, context.library.version), compute_declaration("python", "python", python, context.library.version), compute_declaration("python_otel", "python_otel", python_otel, context.library.version), + compute_declaration("python_lambda", "python_lambda", python_lambda, context.library.version), compute_declaration("ruby", "ruby", ruby, context.library.version), compute_declaration("*", 
"agent", agent, context.agent_version), compute_declaration("*", "dd_apm_inject", dd_apm_inject, context.dd_apm_inject_version), diff --git a/utils/build/build.sh b/utils/build/build.sh index 4b43099bb2d..6c5480de95e 100755 --- a/utils/build/build.sh +++ b/utils/build/build.sh @@ -37,6 +37,7 @@ readonly DEFAULT_dotnet=poc readonly DEFAULT_cpp=nginx readonly DEFAULT_cpp_httpd=httpd readonly DEFAULT_cpp_nginx=nginx +readonly DEFAULT_python_lambda=apigw-rest readonly SCRIPT_NAME="${0}" readonly SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" @@ -262,6 +263,15 @@ build() { docker save system_tests/weblog | gzip > $BINARIES_FILENAME fi fi + elif [[ $IMAGE_NAME == lambda-proxy ]]; then + docker buildx build \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --load \ + --progress=plain \ + -f utils/build/docker/lambda-proxy.Dockerfile \ + -t system_tests/lambda-proxy \ + $EXTRA_DOCKER_ARGS \ + . else echo "Don't know how to build $IMAGE_NAME" exit 1 @@ -274,7 +284,7 @@ COMMAND=build while [[ "$#" -gt 0 ]]; do case $1 in - cpp_nginx|cpp_httpd|dotnet|golang|java|java_otel|nodejs|nodejs_otel|php|python|python_otel|ruby) TEST_LIBRARY="$1";; + cpp_nginx|cpp_httpd|dotnet|golang|java|java_otel|nodejs|nodejs_otel|php|python|python_lambda|python_otel|ruby) TEST_LIBRARY="$1";; -l|--library) TEST_LIBRARY="$2"; shift ;; -i|--images) BUILD_IMAGES="$2"; shift ;; -d|--docker) DOCKER_MODE=1;; diff --git a/utils/build/docker/lambda-proxy.Dockerfile b/utils/build/docker/lambda-proxy.Dockerfile new file mode 100644 index 00000000000..c29204c078f --- /dev/null +++ b/utils/build/docker/lambda-proxy.Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.13-alpine + +WORKDIR /app + +RUN apk add --no-cache curl + +COPY ./utils/build/docker/lambda_proxy/pyproject.toml ./ +RUN pip install --no-cache-dir . 
+ +COPY utils/build/docker/lambda_proxy/main.py ./ + +ENTRYPOINT ["gunicorn", "--bind=0.0.0.0:7777", "--workers=1", "main:app"] diff --git a/utils/build/docker/lambda_proxy/main.py b/utils/build/docker/lambda_proxy/main.py new file mode 100644 index 00000000000..592bf04e413 --- /dev/null +++ b/utils/build/docker/lambda_proxy/main.py @@ -0,0 +1,69 @@ +import os + +from flask import Flask, request +from requests import post + +from samcli.local.apigw.event_constructor import construct_v1_event +from samcli.local.apigw.local_apigw_service import LocalApigwService + +PORT = 7777 + +RIE_HOST = os.environ.get("RIE_HOST", "lambda-weblog") +RIE_PORT = os.environ.get("RIE_PORT", "8080") +FUNCTION_NAME = os.environ.get("FUNCTION_NAME", "function") +RIE_URL = f"http://{RIE_HOST}:{RIE_PORT}/2015-03-31/functions/{FUNCTION_NAME}/invocations" + +app = Flask(__name__) + +app.config["PROVIDE_AUTOMATIC_OPTIONS"] = False + + +def invoke_lambda_function(): + """ + This function is used to invoke the Lambda function with the provided event. + It constructs a v1 event from the Flask request and sends it to the RIE URL. 
+ """ + converted_event = construct_v1_event( + request, + PORT, + binary_types=["application/octet-stream"], + stage_name="Prod", + ) + + response = post( + RIE_URL, + json=converted_event, + headers={"Content-Type": "application/json"}, + ) + + (status_code, headers, body) = LocalApigwService._parse_v1_payload_format_lambda_output( + response.content.decode("utf-8"), + binary_types=[], + flask_request=request, + event_type="Api", + ) + + return app.response_class(response=body, status=status_code, headers=headers) + + +ROUTES = [ + ("/", ["GET", "POST", "OPTIONS"]), + ("/finger_print", ["GET"]), + ("/headers", ["GET"]), + ("/healthcheck", ["GET"]), + ("/params//", ["GET", "POST", "OPTIONS"]), + ("/tag_value//", ["GET", "POST", "OPTIONS"]), + ("/users", ["GET"]), + ("/waf", ["GET", "POST", "OPTIONS"]), + ("/waf/", ["GET", "POST", "OPTIONS"]), + ("/waf/", ["GET", "POST", "OPTIONS"]), + ("/.git", ["GET"]), +] + +for endpoint, methods in ROUTES: + app.add_url_rule( + endpoint, + endpoint, + lambda **kwargs: invoke_lambda_function(), + methods=methods, + ) diff --git a/utils/build/docker/lambda_proxy/pyproject.toml b/utils/build/docker/lambda_proxy/pyproject.toml new file mode 100644 index 00000000000..a709a26f7ea --- /dev/null +++ b/utils/build/docker/lambda_proxy/pyproject.toml @@ -0,0 +1,6 @@ +[project] +name = "lambda-proxy" +version = "0.1.0" +description = "Minimal Flask app to proxy requests to an AWS RIE endpoint" +requires-python = ">=3.13" +dependencies = ["aws-sam-cli<=1.141.0", "flask>=3.1.1", "gunicorn>=23.0.0"] diff --git a/utils/build/docker/python_lambda/apigw-rest.Dockerfile b/utils/build/docker/python_lambda/apigw-rest.Dockerfile new file mode 100644 index 00000000000..a06e37ac51e --- /dev/null +++ b/utils/build/docker/python_lambda/apigw-rest.Dockerfile @@ -0,0 +1,18 @@ +FROM public.ecr.aws/lambda/python:3.13 + +RUN dnf install -y unzip findutils socat + +# Add the Datadog Extension +RUN mkdir -p /opt/extensions +COPY 
--from=public.ecr.aws/datadog/lambda-extension:latest /opt/. /opt/ + +COPY utils/build/docker/python_lambda/install_datadog_lambda.sh binaries* /binaries/ +RUN /binaries/install_datadog_lambda.sh + +# Setup the aws_lambda handler +COPY utils/build/docker/python_lambda/function/. ${LAMBDA_TASK_ROOT} +RUN pip install -r ${LAMBDA_TASK_ROOT}/requirements.txt + +ENV DD_LAMBDA_HANDLER=handler.lambda_handler + +ENTRYPOINT ["/bin/sh"] diff --git a/utils/build/docker/python_lambda/function/app.sh b/utils/build/docker/python_lambda/function/app.sh new file mode 100644 index 00000000000..eb958bd98f3 --- /dev/null +++ b/utils/build/docker/python_lambda/function/app.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set -eu + +export DD_LAMBDA_HANDLER=handler.lambda_handler + +socat TCP-LISTEN:8127,reuseaddr,fork,bind=0.0.0.0 TCP:127.0.0.1:8126 & + +exec /lambda-entrypoint.sh datadog_lambda.handler.handler diff --git a/utils/build/docker/python_lambda/function/handler.py b/utils/build/docker/python_lambda/function/handler.py new file mode 100644 index 00000000000..5a07baa1296 --- /dev/null +++ b/utils/build/docker/python_lambda/function/handler.py @@ -0,0 +1,147 @@ +import logging +import urllib +import urllib.parse + +from typing import Any + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.typing.lambda_context import LambdaContext +from aws_lambda_powertools.event_handler import Response + +import datadog_lambda +from ddtrace.appsec import trace_utils as appsec_trace_utils +from ddtrace.contrib.trace_utils import set_user +from ddtrace.trace import tracer + + +logger = logging.getLogger(__name__) + + +app = APIGatewayRestResolver() + +_TRACK_CUSTOM_APPSEC_EVENT_NAME = "system_tests_appsec_event" + + +def version_info(): + return { + "status": "ok", + "library": { + "name": "python_lambda", + "version": datadog_lambda.__version__, + }, + } + + +@app.get("/") +@app.post("/") +@app.route("/", method="OPTIONS") +def root(): + return 
Response(status_code=200, content_type="text/plain", body="Hello, World!\n") + + +@app.get("/headers") +def headers(): + return Response(status_code=200, body="OK", headers={"Content-Language": "en-US", "Content-Type": "text/plain"}) + + +@app.get("/healthcheck") +def healthcheck_route(): + return Response( + status_code=200, + content_type="application/json", + body=version_info(), + ) + + +@app.get("/params/") +@app.post("/params/") +@app.route("/params/", method="OPTIONS") +@app.get("/waf/") +@app.post("/waf/") +@app.get("/waf/") +@app.post("/waf/") +@app.route("/waf/", method="OPTIONS") +def waf_params(path: str = ""): + return Response( + status_code=200, + content_type="text/plain", + body="Hello, World!\n", + ) + + +@app.get("/tag_value//") +@app.route("/tag_value//", method="OPTIONS") +def tag_value(tag_value: str, status_code: int): + appsec_trace_utils.track_custom_event( + tracer, event_name=_TRACK_CUSTOM_APPSEC_EVENT_NAME, metadata={"value": tag_value} + ) + return Response( + status_code=status_code, + content_type="text/plain", + body="Value tagged", + headers=app.current_event.query_string_parameters, + ) + + +@app.post("/tag_value//") +def tag_value_post(tag_value: str, status_code: int): + appsec_trace_utils.track_custom_event( + tracer, event_name=_TRACK_CUSTOM_APPSEC_EVENT_NAME, metadata={"value": tag_value} + ) + if tag_value.startswith("payload_in_response_body"): + # Get form data from the current event + body = app.current_event.body or "" + if app.current_event.is_base64_encoded: + import base64 + + body = base64.b64decode(body).decode("utf-8") + + form_data = urllib.parse.parse_qs(body) + + return Response( + status_code=status_code, + content_type="application/json", + body={"payload": form_data}, + headers=app.current_event.query_string_parameters or {}, + ) + return Response( + status_code=status_code, + content_type="text/plain", + body="Value tagged", + headers=app.current_event.query_string_parameters or {}, + ) + + 
+@app.get("/users") +def users(): + user = app.current_event.query_string_parameters.get("user") + set_user( + tracer, + user_id=user, + email="usr.email", + name="usr.name", + session_id="usr.session_id", + role="usr.role", + scope="usr.scope", + ) + return Response( + status_code=200, + content_type="text/plain", + body="Ok", + ) + + +def lambda_handler(event: dict[str, Any], context: LambdaContext): + """ + Lambda function handler for AWS Lambda Powertools API Gateway integration. + + Args: + event (dict): The event data passed to the Lambda function. + context (LambdaContext): The context object provided by AWS Lambda. + + Returns: + dict: The response from the API Gateway resolver. + """ + if event.get("healthcheck"): + return version_info() + return app.resolve(event, context) diff --git a/utils/build/docker/python_lambda/function/requirements.txt b/utils/build/docker/python_lambda/function/requirements.txt new file mode 100644 index 00000000000..be2c9d6827c --- /dev/null +++ b/utils/build/docker/python_lambda/function/requirements.txt @@ -0,0 +1 @@ +aws-lambda-powertools==3.17.0 diff --git a/utils/build/docker/python_lambda/install_datadog_lambda.sh b/utils/build/docker/python_lambda/install_datadog_lambda.sh new file mode 100755 index 00000000000..dc3a765ef0e --- /dev/null +++ b/utils/build/docker/python_lambda/install_datadog_lambda.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +set -eu + +cd /binaries + +if [ "$(find . -maxdepth 1 -name "*.zip" | wc -l)" = "1" ]; then + path=$(readlink -f "$(find . -maxdepth 1 -name "*.zip")") + echo "Install datadog_lambda from ${path}" + unzip "${path}" -d /opt +else + echo "Fetching from latest GitHub release" + curl -fsSLO https://github.com/DataDog/datadog-lambda-python/releases/latest/download/datadog_lambda_py-amd64-3.13.zip + unzip -o datadog_lambda_py-amd64-3.13.zip -d /opt + + if [ ! 
-f datadog_lambda_py-amd64-3.13.zip ]; then + echo "Failed to download datadog_lambda_py-amd64-3.13.zip" + exit 1 + fi +fi diff --git a/utils/proxy/core.py b/utils/proxy/core.py index af230b87b45..b20333eea0e 100644 --- a/utils/proxy/core.py +++ b/utils/proxy/core.py @@ -48,6 +48,9 @@ def __init__(self) -> None: self.rc_api_enabled = os.environ.get("SYSTEM_TESTS_RC_API_ENABLED") == "True" self.span_meta_structs_disabled = os.environ.get("SYSTEM_TESTS_AGENT_SPAN_META_STRUCTS_DISABLED") == "True" + self.tracing_agent_target_host = os.environ.get("PROXY_TRACING_AGENT_TARGET_HOST", "agent") + self.tracing_agent_target_port = int(os.environ.get("PROXY_TRACING_AGENT_TARGET_PORT", "8127")) + span_events = os.environ.get("SYSTEM_TESTS_AGENT_SPAN_EVENTS") self.span_events = span_events != "False" @@ -122,7 +125,10 @@ def request(self, flow: HTTPFlow): ProxyPorts.golang_buddy, ProxyPorts.weblog, ): - flow.request.host, flow.request.port = "agent", 8127 + flow.request.host, flow.request.port = ( + self.tracing_agent_target_host, + self.tracing_agent_target_port, + ) flow.request.scheme = "http" logger.info(f" => reverse proxy to {flow.request.pretty_url}") diff --git a/utils/scripts/ci_orchestrators/workflow_data.py b/utils/scripts/ci_orchestrators/workflow_data.py index f1465ef8f79..fb5be614ba8 100644 --- a/utils/scripts/ci_orchestrators/workflow_data.py +++ b/utils/scripts/ci_orchestrators/workflow_data.py @@ -421,6 +421,12 @@ def _filter_scenarios(scenarios: list[str], library: str, weblog: str, ci_enviro def _is_supported(library: str, weblog: str, scenario: str, _ci_environment: str) -> bool: # this function will remove some couple scenarios/weblog that are not supported + # Only Allow Lambda scenarios for the lambda libraries + is_lambda_library = library in ("python_lambda",) + is_lambda_scenario = scenario in ("APPSEC_LAMBDA_DEFAULT",) + if is_lambda_library != is_lambda_scenario: + return False + # open-telemetry-automatic if scenario == "OTEL_INTEGRATIONS": 
possible_values: tuple = ( diff --git a/utils/scripts/compute-workflow-parameters.py b/utils/scripts/compute-workflow-parameters.py index eaa7f1b02ef..677757dbe28 100644 --- a/utils/scripts/compute-workflow-parameters.py +++ b/utils/scripts/compute-workflow-parameters.py @@ -87,7 +87,7 @@ def __init__( "job_matrix": list(range(1, parametric_job_count + 1)), "enable": len(scenario_map["parametric"]) > 0 and "otel" not in library - and library not in ("cpp_nginx", "cpp_httpd"), + and library not in ("cpp_nginx", "cpp_httpd", "python_lambda"), } self.data["externalprocessing"] = {"scenarios": scenario_map.get("externalprocessing", [])} @@ -237,6 +237,7 @@ def _get_workflow_map( "java_otel", "nodejs_otel", "python_otel", + "python_lambda", ], ) diff --git a/utils/scripts/compute_impacted_scenario.py b/utils/scripts/compute_impacted_scenario.py index b4d1a5ece26..f1b4dcac931 100644 --- a/utils/scripts/compute_impacted_scenario.py +++ b/utils/scripts/compute_impacted_scenario.py @@ -153,6 +153,7 @@ def main() -> None: r"manifests/.*": None, # already handled by the manifest comparison r"repository\.datadog\.yml": None, r"utils/_context/_scenarios/appsec_low_waf_timeout\.py": scenarios.appsec_low_waf_timeout, + r"utils/_context/_scenarios/aws_lambda\.py": scenarios.appsec_lambda_default, r"utils/_context/_scenarios/auto_injection\.py": scenario_groups.onboarding, r"utils/_context/_scenarios/default\.py": scenarios.default, r"utils/_context/_scenarios/integrations\.py": scenario_groups.integrations, @@ -164,6 +165,7 @@ def main() -> None: r"utils/build/docker/java_otel/.*": scenario_groups.open_telemetry, r"utils/build/docker/nodejs_otel/.*": scenario_groups.open_telemetry, r"utils/build/docker/python_otel/.*": scenario_groups.open_telemetry, + r"utils/build/docker/python_lambda/.*": scenarios.appsec_lambda_default, r"utils/build/docker/\w+/parametric/.*": scenarios.parametric, r"utils/build/docker/.*": [ scenario_groups.end_to_end, diff --git 
a/utils/scripts/get-image-list.py b/utils/scripts/get-image-list.py index 6b8a9f89b84..ead33f700d2 100644 --- a/utils/scripts/get-image-list.py +++ b/utils/scripts/get-image-list.py @@ -54,6 +54,7 @@ def main(scenarios: list[str], library: str, weblog: str) -> None: "java_otel", "python_otel", "nodejs_otel", + "python_lambda", "", ], ) diff --git a/utils/scripts/load-binary.sh b/utils/scripts/load-binary.sh index ffd9b361d19..41efdc3e2e3 100755 --- a/utils/scripts/load-binary.sh +++ b/utils/scripts/load-binary.sh @@ -10,17 +10,18 @@ # # Binaries sources: # -# * Agent: Docker hub datadog/agent-dev:master-py3 -# * cpp_httpd: Github action artifact -# * Golang: github.com/DataDog/dd-trace-go/v2@main -# * .NET: ghcr.io/datadog/dd-trace-dotnet -# * Java: S3 -# * PHP: ghcr.io/datadog/dd-trace-php -# * Node.js: Direct from github source -# * C++: Direct from github source -# * Python: Clone locally the githu repo -# * Ruby: Direct from github source -# * WAF: Direct from github source, but not working, as this repo is now private +# * Agent: Docker hub datadog/agent-dev:master-py3 +# * cpp_httpd: Github action artifact +# * Golang: github.com/DataDog/dd-trace-go/v2@main +# * .NET: ghcr.io/datadog/dd-trace-dotnet +# * Java: S3 +# * PHP: ghcr.io/datadog/dd-trace-php +# * Node.js: Direct from github source +# * C++: Direct from github source +# * Python: Clone locally the github repo +# * Ruby: Direct from github source +# * WAF: Direct from github source, but not working, as this repo is now private +# * Python Lambda: Fetch from GitHub Actions artifact ########################################################################################## set -eu @@ -123,7 +124,8 @@ get_github_action_artifact() { SLUG=$1 WORKFLOW=$2 BRANCH=$3 - PATTERN=$4 + ARTIFACT_NAME=$4 + PATTERN=$5 # query filter seems not to be working ?? 
WORKFLOWS=$(curl --silent --fail --show-error -H "Authorization: token $GH_TOKEN" "https://api.github.com/repos/$SLUG/actions/workflows/$WORKFLOW/runs?per_page=100") @@ -133,8 +135,7 @@ get_github_action_artifact() { HTML_URL=$(echo $WORKFLOWS | jq -r "$QUERY | .html_url") echo "Load artifact $HTML_URL" ARTIFACTS=$(curl --silent -H "Authorization: token $GH_TOKEN" $ARTIFACT_URL) - - ARCHIVE_URL=$(echo $ARTIFACTS | jq -r '.artifacts[0].archive_download_url') + ARCHIVE_URL=$(echo $ARTIFACTS | jq -r --arg ARTIFACT_NAME "$ARTIFACT_NAME" '.artifacts | map(select(.name | contains($ARTIFACT_NAME))).[0].archive_download_url') echo "Load archive $ARCHIVE_URL" curl -H "Authorization: token $GH_TOKEN" --output artifacts.zip -L $ARCHIVE_URL @@ -282,7 +283,7 @@ elif [ "$TARGET" = "cpp" ]; then elif [ "$TARGET" = "cpp_httpd" ]; then assert_version_is_dev - get_github_action_artifact "DataDog/httpd-datadog" "dev.yml" "main" "mod_datadog.so" + get_github_action_artifact "DataDog/httpd-datadog" "dev.yml" "main" "mod_datadog_artifact" "mod_datadog.so" elif [ "$TARGET" = "cpp_nginx" ]; then assert_version_is_dev @@ -322,7 +323,11 @@ elif [ "$TARGET" = "waf_rule_set" ]; then -H "Authorization: token $GH_TOKEN" \ -H "Accept: application/vnd.github.v3.raw" \ https://api.github.com/repos/DataDog/appsec-event-rules/contents/build/recommended.json +elif [ "$TARGET" = "python_lambda" ]; then + assert_version_is_dev + assert_target_branch_is_not_set + get_github_action_artifact "DataDog/datadog-lambda-python" "build_layer.yml" "main" "datadog-lambda-python-3.13-amd64" "datadog_lambda_py-amd64-3.13.zip" else echo "Unknown target: $1" exit 1