diff --git a/.gitlab/config.yaml b/.gitlab/config.yaml index 08b7531b0..398030f65 100644 --- a/.gitlab/config.yaml +++ b/.gitlab/config.yaml @@ -9,7 +9,7 @@ outputFiles: datasources: flavors: url: .gitlab/datasources/flavors.yaml - + environments: url: .gitlab/datasources/environments.yaml diff --git a/.gitlab/templates/pipeline.yaml.tpl b/.gitlab/templates/pipeline.yaml.tpl index 1b11dc355..bb21a4c0f 100644 --- a/.gitlab/templates/pipeline.yaml.tpl +++ b/.gitlab/templates/pipeline.yaml.tpl @@ -324,48 +324,83 @@ signed layer bundle: - mkdir -p datadog_extension-signed-bundle-${CI_JOB_ID} - cp .layers/datadog_extension-*.zip datadog_extension-signed-bundle-${CI_JOB_ID} -# Integration Tests - Build Java Lambda function -build java lambda: +# Integration Tests - Build Lambda functions in parallel by runtime + +build java lambdas: + stage: integration-tests + image: registry.ddbuild.io/images/docker:27.3.1 + tags: ["docker-in-docker:arm64"] + rules: + - when: on_success + needs: [] + cache: + key: maven-cache-${CI_COMMIT_REF_SLUG} + paths: + - integration-tests/.cache/maven/ + artifacts: + expire_in: 1 hour + paths: + - integration-tests/lambda/*/target/ + script: + - cd integration-tests + - ./scripts/build-java.sh + +build dotnet lambdas: + stage: integration-tests + image: registry.ddbuild.io/images/docker:27.3.1 + tags: ["docker-in-docker:arm64"] + rules: + - when: on_success + needs: [] + cache: + key: nuget-cache-${CI_COMMIT_REF_SLUG} + paths: + - integration-tests/.cache/nuget/ + artifacts: + expire_in: 1 hour + paths: + - integration-tests/lambda/*/bin/ + script: + - cd integration-tests + - ./scripts/build-dotnet.sh + +build python lambdas: stage: integration-tests image: registry.ddbuild.io/images/docker:27.3.1 tags: ["docker-in-docker:arm64"] rules: - when: on_success needs: [] + cache: + key: pip-cache-${CI_COMMIT_REF_SLUG} + paths: + - integration-tests/.cache/pip/ artifacts: expire_in: 1 hour paths: - - integration-tests/lambda/base-java/target/ + - 
integration-tests/lambda/*/package/ script: - - cd integration-tests/lambda/base-java - - docker run --rm --platform linux/arm64 - -v "$(pwd)":/workspace - -w /workspace - maven:3.9-eclipse-temurin-21-alpine - mvn clean package - -# Integration Tests - Build .NET Lambda function -build dotnet lambda: + - cd integration-tests + - ./scripts/build-python.sh + +build node lambdas: stage: integration-tests image: registry.ddbuild.io/images/docker:27.3.1 tags: ["docker-in-docker:arm64"] rules: - when: on_success needs: [] + cache: + key: npm-cache-${CI_COMMIT_REF_SLUG} + paths: + - integration-tests/.cache/npm/ artifacts: expire_in: 1 hour paths: - - integration-tests/lambda/base-dotnet/bin/ + - integration-tests/lambda/*/node_modules/ script: - - cd integration-tests/lambda/base-dotnet - - docker run --rm --platform linux/arm64 - -v "$(pwd)":/workspace - -w /workspace - mcr.microsoft.com/dotnet/sdk:8.0-alpine - sh -c "apk add --no-cache zip && - dotnet tool install -g Amazon.Lambda.Tools || true && - export PATH=\"\$PATH:/root/.dotnet/tools\" && - dotnet lambda package -o bin/function.zip --function-architecture arm64" + - cd integration-tests + - ./scripts/build-node.sh # Integration Tests - Publish arm64 layer with integration test prefix publish integration layer (arm64): @@ -396,20 +431,27 @@ publish integration layer (arm64): - echo "Published layer ARN - ${LAYER_ARN}" {{ end }} -# Integration Tests - Deploy CDK stacks with commit hash prefix -integration-deploy: +# Integration Tests - Deploy CDK stacks with commit hash prefix (parallel by test suite) +integration-deploy-suite: stage: integration-tests tags: ["arch:amd64"] image: ${CI_DOCKER_TARGET_IMAGE}:${CI_DOCKER_TARGET_VERSION} + parallel: + matrix: + - TEST_SUITE: [base, otlp] rules: - when: on_success needs: - publish integration layer (arm64) - - build java lambda - - build dotnet lambda + - build java lambdas + - build dotnet lambdas + - build python lambdas + - build node lambdas dependencies: - - build 
java lambda - - build dotnet lambda + - build java lambdas + - build dotnet lambdas + - build python lambdas + - build node lambdas variables: IDENTIFIER: ${CI_COMMIT_SHORT_SHA} AWS_DEFAULT_REGION: us-east-1 @@ -422,23 +464,26 @@ integration-deploy: - npm ci {{ end }} script: - - echo "Deploying CDK stacks with identifier ${IDENTIFIER}..." + - echo "Deploying ${TEST_SUITE} CDK stack with identifier ${IDENTIFIER}..." - export EXTENSION_LAYER_ARN=$(aws lambda list-layer-versions --layer-name "Datadog-Extension-ARM-${CI_COMMIT_SHORT_SHA}" --query 'LayerVersions[0].LayerVersionArn' --output text --region us-east-1) - echo "Using integration test layer - ${EXTENSION_LAYER_ARN}" - export CDK_DEFAULT_ACCOUNT=$(aws sts get-caller-identity --query Account --output text) - export CDK_DEFAULT_REGION=us-east-1 - npm run build - - npx cdk deploy "integ-$IDENTIFIER-*" --require-approval never + - npx cdk deploy "integ-${IDENTIFIER}-${TEST_SUITE}" --require-approval never -# Integration Tests - Run Jest test suite -integration-test: +# Integration Tests - Run Jest test suite (parallel by test suite) +integration-test-suite: stage: integration-tests tags: ["arch:amd64"] image: ${CI_DOCKER_TARGET_IMAGE}:${CI_DOCKER_TARGET_VERSION} + parallel: + matrix: + - TEST_SUITE: [base, otlp] rules: - when: on_success needs: - - integration-deploy + - integration-deploy-suite variables: IDENTIFIER: ${CI_COMMIT_SHORT_SHA} DD_SITE: datadoghq.com @@ -450,27 +495,30 @@ integration-test: - cd integration-tests - npm ci script: - - echo "Running integration tests with identifier ${IDENTIFIER}..." - - npm run test:ci + - echo "Running ${TEST_SUITE} integration tests with identifier ${IDENTIFIER}..." 
+ - export TEST_SUITE=${TEST_SUITE} + - npx jest tests/${TEST_SUITE}.test.ts {{ end }} artifacts: when: always paths: - integration-tests/test-results/ reports: - junit: integration-tests/test-results/junit.xml + junit: integration-tests/test-results/junit-*.xml expire_in: 30 days -# Integration Tests - Cleanup stacks -integration-cleanup-stacks: +# Integration Tests - Cleanup stacks (parallel by test suite) +integration-cleanup-suite: stage: integration-tests tags: ["arch:amd64"] image: ${CI_DOCKER_TARGET_IMAGE}:${CI_DOCKER_TARGET_VERSION} + parallel: + matrix: + - TEST_SUITE: [base, otlp] rules: - when: always needs: - - job: integration-test - optional: false + - integration-test-suite variables: IDENTIFIER: ${CI_COMMIT_SHORT_SHA} {{ with $environment := (ds "environments").environments.sandbox }} @@ -478,31 +526,27 @@ integration-cleanup-stacks: - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source .gitlab/scripts/get_secrets.sh {{ end }} script: - - echo "Destroying CDK stacks with identifier ${IDENTIFIER}..." + - echo "Destroying ${TEST_SUITE} CDK stack with identifier ${IDENTIFIER}..." 
- | - # Find all stacks matching the pattern using CloudFormation API - STACKS=$(aws cloudformation list-stacks \ - --stack-status-filter CREATE_COMPLETE UPDATE_COMPLETE UPDATE_ROLLBACK_COMPLETE \ - --query "StackSummaries[?starts_with(StackName, 'integ-${IDENTIFIER}-')].StackName" \ - --output text --region us-east-1) - - if [ -z "$STACKS" ]; then - echo "No stacks found matching pattern integ-${IDENTIFIER}-*" + STACK_NAME="integ-${IDENTIFIER}-${TEST_SUITE}" + + # Check if stack exists + STACK_STATUS=$(aws cloudformation describe-stacks \ + --stack-name "${STACK_NAME}" \ + --query 'Stacks[0].StackStatus' \ + --output text --region us-east-1 2>/dev/null || echo "DOES_NOT_EXIST") + + if [ "$STACK_STATUS" = "DOES_NOT_EXIST" ]; then + echo "Stack ${STACK_NAME} does not exist, nothing to clean up" else - echo "Found stacks to delete: ${STACKS}" - for STACK in $STACKS; do - echo "Deleting stack ${STACK}..." - aws cloudformation delete-stack --stack-name "${STACK}" --region us-east-1 || echo "Failed to delete ${STACK}, continuing..." - done + echo "Found stack ${STACK_NAME} with status ${STACK_STATUS}" + echo "Deleting stack ${STACK_NAME}..." + aws cloudformation delete-stack --stack-name "${STACK_NAME}" --region us-east-1 || echo "Failed to delete ${STACK_NAME}, continuing..." - # Wait for all deletions to complete - echo "Waiting for stack deletions to complete..." - for STACK in $STACKS; do - echo "Waiting for ${STACK}..." - aws cloudformation wait stack-delete-complete --stack-name "${STACK}" --region us-east-1 || echo "Stack ${STACK} deletion did not complete cleanly, continuing..." - done + echo "Waiting for stack deletion to complete..." + aws cloudformation wait stack-delete-complete --stack-name "${STACK_NAME}" --region us-east-1 || echo "Stack ${STACK_NAME} deletion did not complete cleanly, continuing..." 
- echo "All stacks deleted successfully" + echo "${TEST_SUITE} stack deleted successfully" fi # Integration Tests - Cleanup layer @@ -513,8 +557,7 @@ integration-cleanup-layer: rules: - when: always needs: - - job: integration-cleanup-stacks - optional: false + - job: integration-cleanup-suite variables: IDENTIFIER: ${CI_COMMIT_SHORT_SHA} {{ with $environment := (ds "environments").environments.sandbox }} diff --git a/integration-tests/.gitignore b/integration-tests/.gitignore index afd3f39af..7e7648668 100644 --- a/integration-tests/.gitignore +++ b/integration-tests/.gitignore @@ -38,3 +38,13 @@ Thumbs.db # Lambda artifacts response.json lambda-bundle.zip + +# Lambda build outputs +lambda/*/target/ +lambda/*/bin/ +lambda/*/obj/ +lambda/*/package/ +lambda/*/node_modules/ + +# Build caches +.cache/ diff --git a/integration-tests/README.md b/integration-tests/README.md index eea6e75b3..319088937 100644 --- a/integration-tests/README.md +++ b/integration-tests/README.md @@ -10,16 +10,26 @@ The general flow is: For simplicity, integration tests are setup to only test against ARM runtimes. +## Supported Runtimes + +All test suites cover the following Lambda runtimes: +- **Java 21** - Eclipse Temurin +- **.NET 8** - Alpine +- **Python 3.12/3.13** - Standard Python runtime +- **Node.js 20** - Alpine + ## Test Suites +Integration tests are organized into independent test suites that run in parallel in the GitLab CI/CD pipeline. Each suite has its own lifecycle (deploy → test → cleanup) and can be executed independently. + ### Base Tests -The base test suite provides basic functionality tests across all supported Lambda runtimes. Also serves as an example for other tests. +The base test suite provides basic functionality tests across all supported Lambda runtimes. These tests verify core extension functionality without additional instrumentation. 
-The base tests verify the extension can: -- Collect and forward logs to Datadog -- Generate and send traces with proper span structure -- Detect cold starts +**What it tests:** +- Extension can collect and forward logs to Datadog +- Extension generates and sends traces with proper span structure +- Extension detects cold starts correctly **Test Coverage:** - Lambda invocation succeeds (200 status code) @@ -27,21 +37,118 @@ The base tests verify the extension can: - One trace is sent to Datadog - `aws.lambda` span exists with correct properties including `cold_start: 'true'` - `aws.lambda.cold_start` span is created -- `aws.lambda.load` spand is created for python and node. +- `aws.lambda.load` span is created for Python and Node + +### OTLP Tests + +The OTLP test suite verifies OpenTelemetry Protocol (OTLP) integration with the Datadog Lambda Extension. These tests use Lambda functions instrumented with OpenTelemetry SDKs to ensure telemetry data flows correctly through the extension to Datadog. + +**What it tests:** +- Lambda functions instrumented with OpenTelemetry SDKs can invoke successfully +- Traces are properly sent to Datadog via OTLP +- Spans contain correct structure and attributes + +**Test Coverage:** +- Lambda invocation succeeds (200 status code) +- At least one trace is sent to Datadog +- Trace contains valid spans with proper structure + +## CI/CD Pipeline Structure + +The GitLab CI/CD pipeline runs test suites independently in parallel, providing: +- **Isolation**: One test suite failure doesn't block others +- **Speed**: Test suites run simultaneously +- **Efficiency**: Can retry individual test suites without redeploying all stacks + +### Pipeline Flow + +``` + ┌→ deploy-base → test-base → cleanup-base ┐ +publish layer → build lambdas ─────┤ ├→ cleanup-layer + └→ deploy-otlp → test-otlp → cleanup-otlp ┘ +``` + +### Test Suite Lifecycle + +Each test suite (base, otlp) follows this lifecycle: + +1. 
**Deploy**: Deploys only the stacks for that suite + - Pattern: `cdk deploy "integ-${IDENTIFIER}-${TEST_SUITE}-*"` + - Example: `integ-abc123-base-*` deploys all base test stacks + +2. **Test**: Runs only the tests for that suite + - Command: `jest tests/${TEST_SUITE}.test.ts` + - Example: `jest tests/base.test.ts` + +3. **Cleanup**: Removes only the stacks for that suite + - Runs with `when: always` to ensure cleanup on failure + - Pattern: Deletes all stacks matching `integ-${IDENTIFIER}-${TEST_SUITE}-*` + +### Adding a New Test Suite + +To add a new test suite to the parallel execution: + +1. **Create test file**: `tests/.test.ts` +2. **Create CDK stacks**: `lib/stacks/--stack.ts` +3. **Register stacks**: Add to `bin/app.ts` +4. **Update pipeline**: Add suite name to `.gitlab/templates/pipeline.yaml.tpl`: + ```yaml + parallel: + matrix: + - TEST_SUITE: [base, otlp, ] + ``` + +### Running Test Suites Locally + +Test individual suites using CDK patterns: + +```bash +# Deploy specific test suite +export IDENTIFIER="john" +npx cdk deploy "integ-${IDENTIFIER}-base-*" --require-approval never + +# Run specific test suite +jest tests/base.test.ts + +# Cleanup specific test suite stacks +aws cloudformation list-stacks \ + --query "StackSummaries[?starts_with(StackName, 'integ-${IDENTIFIER}-base-')].StackName" \ + --output text | xargs -n1 aws cloudformation delete-stack --stack-name +``` + +## Building Lambda Functions + +Lambda functions with compiled languages (Java, .NET) or external dependencies (Python, Node.js with packages) must be built before deployment. Build scripts use Docker for cross-platform compatibility and don't require local toolchains. 
+ +### Build Scripts -**Build Requirements:** +Runtime-specific build scripts are located in `scripts/`: -For Java and .NET tests, Lambda functions must be built before deployment: +```bash +# Build all Java Lambda functions +./scripts/build-java.sh + +# Build all .NET Lambda functions +./scripts/build-dotnet.sh + +# Build all Python Lambda functions +./scripts/build-python.sh + +# Build all Node.js Lambda functions +./scripts/build-node.sh +``` + +You can also build a specific function by providing its path: ```bash -# Build Java Lambda (uses Docker) -cd lambda/base-java && ./build.sh +# Build a specific Java function +./scripts/build-java.sh lambda/otlp-java -# Build .NET Lambda (uses Docker) -cd lambda/base-dotnet && ./build.sh +# Build a specific .NET function +./scripts/build-dotnet.sh lambda/base-dotnet ``` -These builds use Docker to ensure cross-platform compatibility and do not require local Maven or .NET SDK installation. +**Note:** The `local_deploy.sh` script automatically builds required Lambda functions based on the stack name, so manual building is optional for local development. ## Guidelines @@ -56,48 +163,87 @@ These builds use Docker to ensure cross-platform compatibility and do not requir ## Local Development -### Prerequisites Set env variables +### Prerequisites + **Datadog API Keys**: Set environment variables - ```bash - export DD_API_KEY="your-datadog-api-key" - export DD_APP_KEY="your-datadog-app-key" - export DATADOG_API_SECRET_ARN="arn:aws:secretsmanager:us-east-1:ACCOUNT_ID:secret:YOUR_SECRET" - ``` +```bash +export DD_API_KEY="your-datadog-api-key" +export DD_APP_KEY="your-datadog-app-key" +export DATADOG_API_SECRET_ARN="arn:aws:secretsmanager:us-east-1:ACCOUNT_ID:secret:YOUR_SECRET" +``` + +**Docker**: Required for building Lambda functions. [Install Docker](https://docs.docker.com/get-docker/) -### 1. Build and Deploy Extension Layer +### Workflow -First, publish your extension layer. +#### 1. 
Build and Publish Extension Layer + +Publish your extension layer to AWS Lambda: ```bash ./scripts/local_publish.sh ``` -This will create and publish `Datadog-Extension-ARM-`. +This creates and publishes `Datadog-Extension-ARM-` with the latest version number. -### 2. Deploy Test Stacks +#### 2. Deploy Test Stacks -Deploy the CDK stacks that create Lambda functions for testing. +Deploy CDK stacks that create Lambda functions for testing: ```bash -./scripts/local_deploy.sh +./scripts/local_deploy.sh ``` -This will create `integ--`. The stacks will use the lambda extension created in the previous step. +This creates `integ--` and automatically: +- Builds required Lambda functions based on the stack name +- Uses the extension layer created in step 1 +- Deploys the stack to AWS -### 3. Run Integration Tests +**Examples:** +```bash +# Deploy base Java test +./scripts/local_deploy.sh base-java-stack + +# Deploy OTLP Python test +./scripts/local_deploy.sh otlp-python-stack +``` + +**Available Stacks:** +- `base-java-stack`, `otlp-java-stack` +- `base-dotnet-stack`, `otlp-dotnet-stack` +- `base-python-stack`, `otlp-python-stack` +- `base-node-stack`, `otlp-node-stack` + +#### 3. Run Integration Tests Run Jest tests that invoke Lambda functions and verify Datadog telemetry: ```bash -# All tests +# Run all tests npm test -# Single test -npm test -- +# Run specific test file +npm test -- base-java.test.ts + +# Run specific test suite +npm test -- --testNamePattern="OTLP" ``` +**Note**: Tests wait several minutes after Lambda invocation to allow telemetry to propagate to Datadog. + +### Manual Building (Optional) +If you need to build Lambda functions manually without deploying: -**Note**: Tests wait for a few minutes after Lambda invocation to allow telemetry to appear in Datadog. 
+```bash +# Build all functions for a runtime +./scripts/build-java.sh +./scripts/build-dotnet.sh +./scripts/build-python.sh +./scripts/build-node.sh + +# Build specific function +./scripts/build-java.sh lambda/otlp-java +``` diff --git a/integration-tests/bin/app.ts b/integration-tests/bin/app.ts index 91e435ee2..acb50f4c7 100644 --- a/integration-tests/bin/app.ts +++ b/integration-tests/bin/app.ts @@ -1,10 +1,8 @@ #!/usr/bin/env node import 'source-map-support/register'; import * as cdk from 'aws-cdk-lib'; -import {BaseNodeStack} from '../lib/stacks/base-node-stack'; -import {BasePythonStack} from '../lib/stacks/base-python-stack'; -import {BaseJavaStack} from '../lib/stacks/base-java-stack'; -import {BaseDotnetStack} from '../lib/stacks/base-dotnet-stack'; +import {BaseStack} from '../lib/stacks/base-stack'; +import {OtlpStack} from '../lib/stacks/otlp-stack'; import {getIdentifier} from '../tests/utils/config'; const app = new cdk.App(); @@ -17,16 +15,10 @@ const env = { const identifier = getIdentifier(); const stacks = [ - new BaseNodeStack(app, `integ-${identifier}-base-node`, { + new BaseStack(app, `integ-${identifier}-base`, { env, }), - new BasePythonStack(app, `integ-${identifier}-base-python`, { - env, - }), - new BaseJavaStack(app, `integ-${identifier}-base-java`, { - env, - }), - new BaseDotnetStack(app, `integ-${identifier}-base-dotnet`, { + new OtlpStack(app, `integ-${identifier}-otlp`, { env, }), ] diff --git a/integration-tests/jest.config.js b/integration-tests/jest.config.js index b537c05eb..2d10a3e21 100644 --- a/integration-tests/jest.config.js +++ b/integration-tests/jest.config.js @@ -17,16 +17,17 @@ module.exports = { 'default', // Console output ['jest-junit', { outputDirectory: './test-results', - outputName: 'junit.xml', + outputName: process.env.TEST_SUITE ? 
`junit-${process.env.TEST_SUITE}.xml` : 'junit.xml', classNameTemplate: '{classname}', titleTemplate: '{title}', ancestorSeparator: ' › ', usePathForSuiteName: true, + suiteName: process.env.TEST_SUITE || 'all', }], ['jest-html-reporters', { publicPath: './test-results', - filename: 'test-report.html', - pageTitle: 'Datadog Lambda Extension Test Report', + filename: process.env.TEST_SUITE ? `test-report-${process.env.TEST_SUITE}.html` : 'test-report.html', + pageTitle: `Datadog Lambda Extension Test Report${process.env.TEST_SUITE ? ` - ${process.env.TEST_SUITE}` : ''}`, expand: true, }], ], diff --git a/integration-tests/lambda/base-dotnet/build.sh b/integration-tests/lambda/base-dotnet/build.sh deleted file mode 100755 index 02d876e2e..000000000 --- a/integration-tests/lambda/base-dotnet/build.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -set -e - -echo "Building .NET Lambda with Docker (ARM64)..." - -# Check if Docker is available -if ! command -v docker &> /dev/null; then - echo "Error: Docker is not installed or not in PATH" - echo "Please install Docker: https://docs.docker.com/get-docker/" - exit 1 -fi - -# Get the directory of this script -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - -# Clean previous build -rm -rf "$SCRIPT_DIR/bin" "$SCRIPT_DIR/obj" - -# Build and package with Docker using ARM64 platform -docker run --rm --platform linux/arm64 \ - -v "$SCRIPT_DIR":/workspace \ - -w /workspace \ - mcr.microsoft.com/dotnet/sdk:8.0-alpine \ - sh -c "apk add --no-cache zip && \ - dotnet tool install -g Amazon.Lambda.Tools || true && \ - export PATH=\"\$PATH:/root/.dotnet/tools\" && \ - dotnet lambda package -o bin/function.zip --function-architecture arm64" - -if [ -f "$SCRIPT_DIR/bin/function.zip" ]; then - echo "✓ Build complete: bin/function.zip" - ls -lh "$SCRIPT_DIR/bin/function.zip" -else - echo "✗ Build failed: bin/function.zip not found" - exit 1 -fi diff --git a/integration-tests/lambda/base-java/build.sh 
b/integration-tests/lambda/base-java/build.sh deleted file mode 100755 index b599643c7..000000000 --- a/integration-tests/lambda/base-java/build.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -set -e - -echo "Building Java Lambda with Docker (ARM64)..." - -# Check if Docker is available -if ! command -v docker &> /dev/null; then - echo "Error: Docker is not installed or not in PATH" - echo "Please install Docker: https://docs.docker.com/get-docker/" - exit 1 -fi - -# Get the directory of this script -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - -# Clean previous build -rm -rf "$SCRIPT_DIR/target" - -# Build with Docker using ARM64 platform -docker run --rm --platform linux/arm64 \ - -v "$SCRIPT_DIR":/workspace \ - -w /workspace \ - maven:3.9-eclipse-temurin-21-alpine \ - mvn clean package - -if [ -f "$SCRIPT_DIR/target/function.jar" ]; then - echo "✓ Build complete: target/function.jar" - ls -lh "$SCRIPT_DIR/target/function.jar" -else - echo "✗ Build failed: target/function.jar not found" - exit 1 -fi diff --git a/integration-tests/lambda/otlp-dotnet/.gitignore b/integration-tests/lambda/otlp-dotnet/.gitignore new file mode 100644 index 000000000..56044625e --- /dev/null +++ b/integration-tests/lambda/otlp-dotnet/.gitignore @@ -0,0 +1,4 @@ +bin/ +obj/ +*.user +*.suo diff --git a/integration-tests/lambda/otlp-dotnet/Function.cs b/integration-tests/lambda/otlp-dotnet/Function.cs new file mode 100644 index 000000000..0725c71ab --- /dev/null +++ b/integration-tests/lambda/otlp-dotnet/Function.cs @@ -0,0 +1,59 @@ +using Amazon.Lambda.Core; +using OpenTelemetry; +using OpenTelemetry.Exporter; +using OpenTelemetry.Resources; +using OpenTelemetry.Trace; +using System; +using System.Collections.Generic; +using System.Diagnostics; + +[assembly: LambdaSerializer(typeof(Amazon.Lambda.Serialization.SystemTextJson.DefaultLambdaJsonSerializer))] + +namespace Function; + +public class Handler +{ + private static readonly TracerProvider tracerProvider; + private 
static readonly ActivitySource activitySource; + + static Handler() + { + try + { + string endpoint = Environment.GetEnvironmentVariable("OTEL_EXPORTER_OTLP_ENDPOINT") ?? "http://localhost:4318"; + string serviceName = Environment.GetEnvironmentVariable("OTEL_SERVICE_NAME") ?? "otlp-dotnet-lambda"; + + activitySource = new ActivitySource(serviceName); + + tracerProvider = Sdk.CreateTracerProviderBuilder() + .AddSource(serviceName) + .SetResourceBuilder(ResourceBuilder.CreateDefault().AddService(serviceName)) + .AddOtlpExporter(options => + { + options.Endpoint = new Uri(endpoint + "/v1/traces"); + options.Protocol = OtlpExportProtocol.HttpProtobuf; + }) + .Build(); + } + catch (Exception ex) + { + Console.WriteLine($"[OTLP] Error initializing OpenTelemetry: {ex.Message}"); + Console.WriteLine($"[OTLP] Stack trace: {ex.StackTrace}"); + } + } + + public Dictionary FunctionHandler(Dictionary input, ILambdaContext context) + { + using (var activity = activitySource.StartActivity("handler", ActivityKind.Server)) + { + activity?.SetTag("request_id", context.AwsRequestId); + activity?.SetTag("http.status_code", 200); + } + var flushResult = tracerProvider.ForceFlush(30000); + return new Dictionary + { + ["statusCode"] = 200, + ["body"] = "{\"message\": \"Success\"}" + }; + } +} diff --git a/integration-tests/lambda/otlp-dotnet/Function.csproj b/integration-tests/lambda/otlp-dotnet/Function.csproj new file mode 100644 index 000000000..23271fd79 --- /dev/null +++ b/integration-tests/lambda/otlp-dotnet/Function.csproj @@ -0,0 +1,17 @@ + + + net8.0 + true + Lambda + true + true + + + + + + + + + + diff --git a/integration-tests/lambda/otlp-java/.gitignore b/integration-tests/lambda/otlp-java/.gitignore new file mode 100644 index 000000000..949faa73a --- /dev/null +++ b/integration-tests/lambda/otlp-java/.gitignore @@ -0,0 +1,5 @@ +target/ +*.class +.classpath +.project +.settings/ diff --git a/integration-tests/lambda/otlp-java/pom.xml 
b/integration-tests/lambda/otlp-java/pom.xml new file mode 100644 index 000000000..38de80066 --- /dev/null +++ b/integration-tests/lambda/otlp-java/pom.xml @@ -0,0 +1,71 @@ + + + 4.0.0 + + example + otlp-java-lambda + 1.0.0 + jar + + OTLP Java Lambda + Java Lambda function with OpenTelemetry for Datadog Extension integration testing + + + 21 + 21 + UTF-8 + 1.32.0 + + + + + com.amazonaws + aws-lambda-java-core + 1.2.3 + + + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-exporter-otlp + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-extension-autoconfigure + ${opentelemetry.version} + + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.0 + + + package + + shade + + + function + false + + + + + + + diff --git a/integration-tests/lambda/otlp-java/src/main/java/example/Handler.java b/integration-tests/lambda/otlp-java/src/main/java/example/Handler.java new file mode 100644 index 000000000..8c2a90a35 --- /dev/null +++ b/integration-tests/lambda/otlp-java/src/main/java/example/Handler.java @@ -0,0 +1,65 @@ +package example; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; + +import java.util.HashMap; +import java.util.Map; + +public class Handler implements RequestHandler, Map> { + private static final Tracer tracer; + private static final OpenTelemetrySdk sdk; + + static { + String endpoint = 
System.getenv().getOrDefault("OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4318"); + String serviceName = System.getenv().getOrDefault("OTEL_SERVICE_NAME", "otlp-java-lambda"); + + Resource resource = Resource.getDefault().toBuilder() + .put("service.name", serviceName) + .build(); + + OtlpHttpSpanExporter spanExporter = OtlpHttpSpanExporter.builder() + .setEndpoint(endpoint + "/v1/traces") + .build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder() + .addSpanProcessor(BatchSpanProcessor.builder(spanExporter).build()) + .setResource(resource) + .build(); + + sdk = OpenTelemetrySdk.builder() + .setTracerProvider(tracerProvider) + .build(); + + GlobalOpenTelemetry.set(sdk); + tracer = sdk.getTracer("otlp-java-lambda"); + } + + @Override + public Map handleRequest(Map event, Context context) { + Span span = tracer.spanBuilder("handler").startSpan(); + + try (Scope scope = span.makeCurrent()) { + String requestId = context.getAwsRequestId(); + span.setAttribute("request_id", requestId); + span.setAttribute("http.status_code", 200); + + Map response = new HashMap<>(); + response.put("statusCode", 200); + response.put("body", "{\"message\": \"Success\"}"); + return response; + } finally { + span.end(); + sdk.getSdkTracerProvider().forceFlush().join(30, java.util.concurrent.TimeUnit.SECONDS); + } + } +} diff --git a/integration-tests/lambda/otlp-node/index.js b/integration-tests/lambda/otlp-node/index.js new file mode 100644 index 000000000..f7ba89674 --- /dev/null +++ b/integration-tests/lambda/otlp-node/index.js @@ -0,0 +1,44 @@ +const { NodeTracerProvider } = require('@opentelemetry/sdk-trace-node'); +const { BatchSpanProcessor } = require('@opentelemetry/sdk-trace-base'); +const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-proto'); +const { Resource } = require('@opentelemetry/resources'); +const { ATTR_SERVICE_NAME } = require('@opentelemetry/semantic-conventions'); +const api = require('@opentelemetry/api'); + +const 
resource = new Resource({ + [ATTR_SERVICE_NAME]: process.env.OTEL_SERVICE_NAME || 'otlp-node-lambda', +}); + +const provider = new NodeTracerProvider({ resource }); +const processor = new BatchSpanProcessor( + new OTLPTraceExporter({ + url: process.env.OTEL_EXPORTER_OTLP_ENDPOINT + '/v1/traces', + }) +); +provider.addSpanProcessor(processor); +provider.register(); + +api.trace.setGlobalTracerProvider(provider); + +exports.handler = async (event, context) => { + const tracer = api.trace.getTracer('otlp-node-lambda'); + + await tracer.startActiveSpan('handler', async (span) => { + try { + span.setAttribute('request_id', context.awsRequestId); + span.setAttribute('http.status_code', 200); + } catch (error) { + span.recordException(error); + span.setStatus({ code: api.SpanStatusCode.ERROR, message: error.message }); + throw error; + } finally { + span.end(); + } + }); + await provider.forceFlush(); + + return { + statusCode: 200, + body: JSON.stringify({ message: 'Success' }) + }; +}; diff --git a/integration-tests/lambda/otlp-node/package-lock.json b/integration-tests/lambda/otlp-node/package-lock.json new file mode 100644 index 000000000..39333ced5 --- /dev/null +++ b/integration-tests/lambda/otlp-node/package-lock.json @@ -0,0 +1,1482 @@ +{ + "name": "otlp-node-lambda", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "otlp-node-lambda", + "version": "1.0.0", + "dependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/exporter-trace-otlp-proto": "^0.54.2", + "@opentelemetry/resources": "^1.28.0", + "@opentelemetry/sdk-node": "^0.54.2", + "@opentelemetry/sdk-trace-base": "^1.28.0", + "@opentelemetry/sdk-trace-node": "^1.28.0", + "@opentelemetry/semantic-conventions": "^1.28.0" + } + }, + "node_modules/@grpc/grpc-js": { + "version": "1.14.2", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.14.2.tgz", + "integrity": 
"sha512-QzVUtEFyu05UNx2xr0fCQmStUO17uVQhGNowtxs00IgTZT6/W2PBLfUkj30s0FKJ29VtTa3ArVNIhNP6akQhqA==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.8.0", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.8.0.tgz", + "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==", + "license": "Apache-2.0", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.5.3", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@opentelemetry/api-logs": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.54.2.tgz", + "integrity": "sha512-4MTVwwmLgUh5QrJnZpYo6YRO5IBLAggf2h8gWDblwRagDStY13aEvt7gGk3jewrMaPlHiF83fENhIx0HO97/cQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.3.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/context-async-hooks": { + "version": "1.30.1", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.30.1.tgz", + "integrity": "sha512-s5vvxXPVdjqS3kTLKMeBMvop9hbWkwzBpu+mUO2M7sZtlkyDJGwFe33wRKnbaYDo8ExRVBIIdwIGrqpxHuKttA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/core": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.27.0.tgz", + "integrity": "sha512-yQPKnK5e+76XuiqUH/gKyS8wv/7qITd5ln56QkBTf3uggr0VkXOXfcaAuG330UfdYu83wsyoBwqwxigpIG+Jkg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/core/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-grpc": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-logs-otlp-grpc/-/exporter-logs-otlp-grpc-0.54.2.tgz", + "integrity": "sha512-MQNmV5r96+5n3axLFgNYtVy62x8Ru7VERZH3zgC50KDcIKWCiQT3vHOtzakhzd1Wq0HqOgu6bzKdwzneSoDrEQ==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-grpc-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2", + "@opentelemetry/sdk-logs": "0.54.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-http": { + "version": "0.54.2", + 
"resolved": "https://registry.npmjs.org/@opentelemetry/exporter-logs-otlp-http/-/exporter-logs-otlp-http-0.54.2.tgz", + "integrity": "sha512-wYeCSbX2XWX2wFslnfQ/YFUolO0fj2nUiGI7oEQWpLKSg40Lc4xOOW14X/EXOkCCijhP7bigo6nvyEQlxEVLjA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.54.2", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2", + "@opentelemetry/sdk-logs": "0.54.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-logs-otlp-proto/-/exporter-logs-otlp-proto-0.54.2.tgz", + "integrity": "sha512-agrzFbSNmIy6dhkyg41ERlEDUDqkaUJj2n/tVRFp9Tl+6wyNVPsqmwU5RWJOXpyK+lYH/znv6A47VpTeJF0lrw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.54.2", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-logs": "0.54.2", + "@opentelemetry/sdk-trace-base": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + 
"node_modules/@opentelemetry/exporter-logs-otlp-proto/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-grpc/-/exporter-trace-otlp-grpc-0.54.2.tgz", + "integrity": "sha512-tmxiCYhQdPrzwlM6O7VQeNP9PBjKhaiOo54wFxQFZQcoVaDiOOES4+6PwHU1eW+43mDsgdQHN5AHSRHVLe9jDA==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-grpc-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-trace-base": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + 
"integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-http/-/exporter-trace-otlp-http-0.54.2.tgz", + "integrity": "sha512-BgWKKyD/h2zpISdmYHN/sapwTjvt1P4p5yx4xeBV8XAEqh4OQUhOtSGFG80+nPQ1F8of3mKOT1DDoDbJp1u25w==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-trace-base": 
"1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-proto/-/exporter-trace-otlp-proto-0.54.2.tgz", + "integrity": 
"sha512-XSmm1N2wAhoWDXP1q/N6kpLebWaxl6VIADv4WA5QWKHLRpF3gLz5NAWNJBR8ygsvv8jQcrwnXgwfnJ18H3v1fg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-trace-base": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": 
"sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-zipkin": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-zipkin/-/exporter-zipkin-1.27.0.tgz", + "integrity": "sha512-eGMY3s4QprspFZojqsuQyQpWNFpo+oNVE/aosTbtvAlrJBAlvXcwwsOROOHOd8Y9lkU4i0FpQW482rcXkgwCSw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-trace-base": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + 
"node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/instrumentation": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.54.2.tgz", + "integrity": "sha512-go6zpOVoZVztT9r1aPd79Fr3OWiD4N24bCPJsIKkBses8oyFo12F/Ew3UBTdIu6hsW4HC4MVEJygG6TEyJI/lg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.54.2", + "@types/shimmer": "^1.2.0", + "import-in-the-middle": "^1.8.1", + "require-in-the-middle": "^7.1.1", + "semver": "^7.5.2", + "shimmer": "^1.2.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-exporter-base": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.54.2.tgz", + "integrity": "sha512-NrNyxu6R/bGAwanhz1HI0aJWKR6xUED4TjCH4iWMlAfyRukGbI9Kt/Akd2sYLwRKNhfS+sKetKGCUQPMDyYYMA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-transformer": "0.54.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.54.2.tgz", + "integrity": "sha512-HZtACQuLhgDcgNa9arGnVVGV28sSGQ+iwRgICWikFKiVxUsoWffqBvTxPa6G3DUTg5R+up97j/zxubEyxSAOHg==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + 
"@opentelemetry/core": "1.27.0", + "@opentelemetry/otlp-exporter-base": "0.54.2", + "@opentelemetry/otlp-transformer": "0.54.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-transformer/-/otlp-transformer-0.54.2.tgz", + "integrity": "sha512-2tIjahJlMRRUz0A2SeE+qBkeBXBFkSjR0wqJ08kuOqaL8HNGan5iZf+A8cfrfmZzPUuMKCyY9I+okzFuFs6gKQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.54.2", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-logs": "0.54.2", + "@opentelemetry/sdk-metrics": "1.27.0", + "@opentelemetry/sdk-trace-base": "1.27.0", + "protobufjs": "^7.3.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + 
"@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/propagator-b3": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-b3/-/propagator-b3-1.30.1.tgz", + "integrity": "sha512-oATwWWDIJzybAZ4pO76ATN5N6FFbOA1otibAVlS8v90B4S1wClnhRUk7K+2CHAwN1JKYuj4jh/lpCEG5BAqFuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.30.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-b3/node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-b3/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + 
}, + "node_modules/@opentelemetry/propagator-jaeger": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-jaeger/-/propagator-jaeger-1.30.1.tgz", + "integrity": "sha512-Pj/BfnYEKIOImirH76M4hDaBSx6HyZ2CXUqk+Kj02m6BB80c/yo4BdWkn/1gDFfU+YPY+bPR2U0DKBfdxCKwmg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.30.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger/node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/resources": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.30.1.tgz", + "integrity": "sha512-5UxZqiAgLYGFjS4s9qm5mBVo433u+dSPUFWVWXmLAD4wB65oMCoXaJP1KJa9DIYYMeHu3z4BZcStG3LC593cWA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.30.1", + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + 
"node_modules/@opentelemetry/resources/node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/resources/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-logs": { + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-logs/-/sdk-logs-0.54.2.tgz", + "integrity": "sha512-yIbYqDLS/AtBbPjCjh6eSToGNRMqW2VR8RrKEy+G+J7dFG7pKoptTH5T+XlKPleP9NY8JZYIpgJBlI+Osi0rFw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.54.2", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.4.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + 
"@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-metrics": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-1.27.0.tgz", + "integrity": "sha512-JzWgzlutoXCydhHWIbLg+r76m+m3ncqvkCcsswXAQ4gqKS+LOHKhq+t6fx1zNytvLuaOUBur7EvWxECc4jPQKg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-metrics/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-metrics/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-node": 
{ + "version": "0.54.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-node/-/sdk-node-0.54.2.tgz", + "integrity": "sha512-afn8GBpA7Gb55aU0LUxIQ+oe6QxLhsf+Te9iw12Non3ZAspzdoCcfz5+hqecwpuVpEDdnj5iSalF7VVaL2pDeg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.54.2", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/exporter-logs-otlp-grpc": "0.54.2", + "@opentelemetry/exporter-logs-otlp-http": "0.54.2", + "@opentelemetry/exporter-logs-otlp-proto": "0.54.2", + "@opentelemetry/exporter-trace-otlp-grpc": "0.54.2", + "@opentelemetry/exporter-trace-otlp-http": "0.54.2", + "@opentelemetry/exporter-trace-otlp-proto": "0.54.2", + "@opentelemetry/exporter-zipkin": "1.27.0", + "@opentelemetry/instrumentation": "0.54.2", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-logs": "0.54.2", + "@opentelemetry/sdk-metrics": "1.27.0", + "@opentelemetry/sdk-trace-base": "1.27.0", + "@opentelemetry/sdk-trace-node": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/context-async-hooks": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.27.0.tgz", + "integrity": "sha512-CdZ3qmHCwNhFAzjTgHqrDQ44Qxcpz43cVxZRhOs+Ns/79ug+Mr84Bkb626bkJLkA3+BLimA5YAEVRlJC6pFb7g==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/propagator-b3": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-b3/-/propagator-b3-1.27.0.tgz", + "integrity": "sha512-pTsko3gnMioe3FeWcwTQR3omo5C35tYsKKwjgTCTVCgd3EOWL9BZrMfgLBmszrwXABDfUrlAEFN/0W0FfQGynQ==", + "license": "Apache-2.0", + "dependencies": { 
+ "@opentelemetry/core": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/propagator-jaeger": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-jaeger/-/propagator-jaeger-1.27.0.tgz", + "integrity": "sha512-EI1bbK0wn0yIuKlc2Qv2LKBRw6LiUWevrjCF80fn/rlaB+7StAi8Y5s8DBqAYNpY7v1q86+NjU18v7hj2ejU3A==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/resources": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.27.0.tgz", + "integrity": "sha512-jOwt2VJ/lUD5BLc+PMNymDrUCpm5PKi1E9oSVYAvz01U/VdndGmrtV3DU1pG4AwlYhJRHbHfOUIlpBeXCPw6QQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.27.0.tgz", + "integrity": "sha512-btz6XTQzwsyJjombpeqCX6LhiMQYpzt2pIYNPnw0IPO/3AhT6yjnf8Mnv3ZC2A4eRYOjqrg+bfaXg9XHDRJDWQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.27.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-trace-node": { + "version": "1.27.0", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.27.0.tgz", + "integrity": "sha512-dWZp/dVGdUEfRBjBq2BgNuBlFqHCxyyMc8FsN0NX15X07mxSUO0SZRLyK/fdAVrde8nqFI/FEdMH4rgU9fqJfQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/context-async-hooks": "1.27.0", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/propagator-b3": "1.27.0", + "@opentelemetry/propagator-jaeger": "1.27.0", + "@opentelemetry/sdk-trace-base": "1.27.0", + "semver": "^7.5.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz", + "integrity": "sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.30.1.tgz", + "integrity": "sha512-jVPgBbH1gCy2Lb7X0AVQ8XAfgg0pJ4nvl8/IiQA6nxOsPvS+0zMJaFSs2ltXe0J6C8dqjcnpyqINDJmU30+uOg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "1.30.1", + "@opentelemetry/resources": "1.30.1", + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-base/node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + 
}, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-base/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-trace-node": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.30.1.tgz", + "integrity": "sha512-cBjYOINt1JxXdpw1e5MlHmFRc5fgj4GW/86vsKFxJCJ8AL4PdVtYH41gWwl4qd4uQjqEL1oJVrXkSy5cnduAnQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/context-async-hooks": "1.30.1", + "@opentelemetry/core": "1.30.1", + "@opentelemetry/propagator-b3": "1.30.1", + "@opentelemetry/propagator-jaeger": "1.30.1", + "@opentelemetry/sdk-trace-base": "1.30.1", + "semver": "^7.5.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": 
"sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/semantic-conventions": { + "version": "1.38.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.38.0.tgz", + "integrity": "sha512-kocjix+/sSggfJhwXqClZ3i9Y/MI0fp7b+g7kCRm6psy2dsf8uApTRclwG18h8Avm7C9+fnt+O36PspJ/OzoWg==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": 
"BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, + "node_modules/@types/node": { + "version": "24.10.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.2.tgz", + "integrity": "sha512-WOhQTZ4G8xZ1tjJTvKOpyEVSGgOTvJAfDK3FNFgELyaTpzhdgHVHeqW8V+UJvzF5BT+/B54T/1S2K6gd9c7bbA==", + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/shimmer": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/@types/shimmer/-/shimmer-1.2.0.tgz", + "integrity": "sha512-UE7oxhQLLd9gub6JKIAhDq06T0F6FnztwMNRvYgjeQSBeMc1ZG/tA47EwfduvkuQS8apbkM/lpLpWsaCeYsXVg==", + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "license": "MIT", + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": 
"https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/function-bind": { + "version": 
"1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/import-in-the-middle": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.15.0.tgz", + "integrity": "sha512-bpQy+CrsRmYmoPMAE/0G33iwRqwW4ouqdRg8jgbH3aKuCtOc8lxgmYXg2dMM92CRiGP660EtBcymH/eVUpCSaA==", + "license": "Apache-2.0", + "dependencies": { + "acorn": "^8.14.0", + "acorn-import-attributes": "^1.9.5", + "cjs-module-lexer": "^1.2.2", + "module-details-from-path": "^1.0.3" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT" + }, + "node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", + "license": "Apache-2.0" + }, + "node_modules/module-details-from-path": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.4.tgz", + "integrity": "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==", + "license": "MIT" + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/protobufjs": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", + "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + 
"@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-in-the-middle": { + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-7.5.2.tgz", + "integrity": "sha512-gAZ+kLqBdHarXB64XpAe2VCjB7rIRv+mU8tfRWziHRJ5umKsIHN2tLLv6EtMw7WCdP19S0ERVMldNvxYCHnhSQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "module-details-from-path": "^1.0.3", + "resolve": "^1.22.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", + "bin": { + "semver": 
"bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shimmer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==", + "license": "BSD-2-Clause" + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + } + } +} diff --git a/integration-tests/lambda/otlp-node/package.json b/integration-tests/lambda/otlp-node/package.json new file mode 100644 index 000000000..fccbdcb09 --- /dev/null +++ b/integration-tests/lambda/otlp-node/package.json @@ -0,0 +1,15 @@ +{ + "name": "otlp-node-lambda", + "version": "1.0.0", + "description": "OpenTelemetry Node.js Lambda test function", + "main": "index.js", + "dependencies": { + "@opentelemetry/sdk-node": "^0.54.2", + "@opentelemetry/sdk-trace-node": "^1.28.0", + "@opentelemetry/sdk-trace-base": "^1.28.0", + 
"@opentelemetry/api": "^1.9.0", + "@opentelemetry/exporter-trace-otlp-proto": "^0.54.2", + "@opentelemetry/resources": "^1.28.0", + "@opentelemetry/semantic-conventions": "^1.28.0" + } +} diff --git a/integration-tests/lambda/otlp-python/lambda_function.py b/integration-tests/lambda/otlp-python/lambda_function.py new file mode 100644 index 000000000..5b56400c2 --- /dev/null +++ b/integration-tests/lambda/otlp-python/lambda_function.py @@ -0,0 +1,36 @@ +from opentelemetry import trace +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.semconv.resource import ResourceAttributes +import os + +# Initialize OpenTelemetry SDK with Protobuf/HTTP exporter +resource = Resource(attributes={ + ResourceAttributes.SERVICE_NAME: os.environ.get('OTEL_SERVICE_NAME', 'otlp-python-lambda') +}) + +provider = TracerProvider(resource=resource) +processor = BatchSpanProcessor( + OTLPSpanExporter( + endpoint=os.environ.get('OTEL_EXPORTER_OTLP_ENDPOINT', 'http://localhost:4318') + '/v1/traces' + ) +) +provider.add_span_processor(processor) +trace.set_tracer_provider(provider) +tracer = trace.get_tracer(__name__) + + +def handler(event, context): + with tracer.start_as_current_span("handler") as span: + span.set_attribute("request_id", context.aws_request_id) + span.set_attribute("http.status_code", 200) + + # Force flush to ensure traces are sent before Lambda freezes + trace.get_tracer_provider().force_flush(timeout_millis=30000) + + return { + 'statusCode': 200, + 'body': '{"message": "Success"}' + } diff --git a/integration-tests/lambda/otlp-python/requirements.txt b/integration-tests/lambda/otlp-python/requirements.txt new file mode 100644 index 000000000..d1893b13d --- /dev/null +++ b/integration-tests/lambda/otlp-python/requirements.txt @@ -0,0 +1,3 @@ 
+opentelemetry-api==1.32.0 +opentelemetry-sdk==1.32.0 +opentelemetry-exporter-otlp-proto-http==1.32.0 diff --git a/integration-tests/lib/stacks/base-stack.ts b/integration-tests/lib/stacks/base-stack.ts new file mode 100644 index 000000000..87d7ee579 --- /dev/null +++ b/integration-tests/lib/stacks/base-stack.ts @@ -0,0 +1,116 @@ +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { + createLogGroup, + defaultDatadogEnvVariables, + defaultDatadogSecretPolicy, + getExtensionLayer, + getNode20Layer, + getPython313Layer, + getJava21Layer, + getDotnet8Layer +} from '../util'; + +export class BaseStack extends cdk.Stack { + constructor(scope: Construct, id: string, props: cdk.StackProps) { + super(scope, id, props); + + // Get layers once for the entire stack + const extensionLayer = getExtensionLayer(this); + const node20Layer = getNode20Layer(this); + const python313Layer = getPython313Layer(this); + const java21Layer = getJava21Layer(this); + const dotnet8Layer = getDotnet8Layer(this); + + // Node.js Lambda + const nodeFunctionName = `${id}-node-lambda`; + const nodeFunction = new lambda.Function(this, nodeFunctionName, { + runtime: lambda.Runtime.NODEJS_20_X, + architecture: lambda.Architecture.ARM_64, + handler: '/opt/nodejs/node_modules/datadog-lambda-js/handler.handler', + code: lambda.Code.fromAsset('./lambda/base-node'), + functionName: nodeFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: nodeFunctionName, + DD_TRACE_ENABLED: 'true', + DD_LAMBDA_HANDLER: 'index.handler', + }, + logGroup: createLogGroup(this, nodeFunctionName) + }); + nodeFunction.addToRolePolicy(defaultDatadogSecretPolicy); + nodeFunction.addLayers(extensionLayer); + nodeFunction.addLayers(node20Layer); + + // Python Lambda + const pythonFunctionName = `${id}-python-lambda`; + const pythonFunction = new lambda.Function(this, 
pythonFunctionName, { + runtime: lambda.Runtime.PYTHON_3_13, + architecture: lambda.Architecture.ARM_64, + handler: 'datadog_lambda.handler.handler', + code: lambda.Code.fromAsset('./lambda/base-python'), + functionName: pythonFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: pythonFunctionName, + DD_TRACE_ENABLED: 'true', + DD_LAMBDA_HANDLER: 'lambda_function.handler', + DD_TRACE_AGENT_URL: 'http://127.0.0.1:8126', + DD_COLD_START_TRACING: 'true', + DD_MIN_COLD_START_DURATION: '0', + }, + logGroup: createLogGroup(this, pythonFunctionName) + }); + pythonFunction.addToRolePolicy(defaultDatadogSecretPolicy); + pythonFunction.addLayers(extensionLayer); + pythonFunction.addLayers(python313Layer); + + // Java Lambda + const javaFunctionName = `${id}-java-lambda`; + const javaFunction = new lambda.Function(this, javaFunctionName, { + runtime: lambda.Runtime.JAVA_21, + architecture: lambda.Architecture.ARM_64, + handler: 'example.Handler::handleRequest', + code: lambda.Code.fromAsset('./lambda/base-java/target/function.jar'), + functionName: javaFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: javaFunctionName, + AWS_LAMBDA_EXEC_WRAPPER: '/opt/datadog_wrapper', + DD_TRACE_ENABLED: 'true', + }, + logGroup: createLogGroup(this, javaFunctionName) + }); + javaFunction.addToRolePolicy(defaultDatadogSecretPolicy); + javaFunction.addLayers(extensionLayer); + javaFunction.addLayers(java21Layer); + + // .NET Lambda + const dotnetFunctionName = `${id}-dotnet-lambda`; + const dotnetFunction = new lambda.Function(this, dotnetFunctionName, { + runtime: lambda.Runtime.DOTNET_8, + architecture: lambda.Architecture.ARM_64, + handler: 'Function::Function.Handler::FunctionHandler', + code: lambda.Code.fromAsset('./lambda/base-dotnet/bin/function.zip'), + functionName: dotnetFunctionName, + timeout: cdk.Duration.seconds(30), 
+ memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: dotnetFunctionName, + AWS_LAMBDA_EXEC_WRAPPER: '/opt/datadog_wrapper', + }, + logGroup: createLogGroup(this, dotnetFunctionName) + }); + dotnetFunction.addToRolePolicy(defaultDatadogSecretPolicy); + dotnetFunction.addLayers(extensionLayer); + dotnetFunction.addLayers(dotnet8Layer); + } +} diff --git a/integration-tests/lib/stacks/otlp-dotnet-stack.ts b/integration-tests/lib/stacks/otlp-dotnet-stack.ts new file mode 100644 index 000000000..112884987 --- /dev/null +++ b/integration-tests/lib/stacks/otlp-dotnet-stack.ts @@ -0,0 +1,33 @@ +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { createLogGroup, defaultDatadogEnvVariables, defaultDatadogSecretPolicy, getExtensionLayer } from '../util'; + +export class OtlpDotnetStack extends cdk.Stack { + constructor(scope: Construct, id: string, props: cdk.StackProps) { + super(scope, id, props); + + const dotnetFunctionName = `${id}-lambda`; + const dotnetFunction = new lambda.Function(this, dotnetFunctionName, { + runtime: lambda.Runtime.DOTNET_8, + architecture: lambda.Architecture.ARM_64, + handler: 'Function::Function.Handler::FunctionHandler', + code: lambda.Code.fromAsset('./lambda/otlp-dotnet/bin/function.zip'), + functionName: dotnetFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 512, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: dotnetFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: dotnetFunctionName, + }, + logGroup: createLogGroup(this, dotnetFunctionName) + }); + + dotnetFunction.addToRolePolicy(defaultDatadogSecretPolicy); + dotnetFunction.addLayers(getExtensionLayer(this)); + } +} diff --git a/integration-tests/lib/stacks/otlp-java-stack.ts 
b/integration-tests/lib/stacks/otlp-java-stack.ts new file mode 100644 index 000000000..66d4f45be --- /dev/null +++ b/integration-tests/lib/stacks/otlp-java-stack.ts @@ -0,0 +1,33 @@ +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { createLogGroup, defaultDatadogEnvVariables, defaultDatadogSecretPolicy, getExtensionLayer } from '../util'; + +export class OtlpJavaStack extends cdk.Stack { + constructor(scope: Construct, id: string, props: cdk.StackProps) { + super(scope, id, props); + + const javaFunctionName = `${id}-lambda`; + const javaFunction = new lambda.Function(this, javaFunctionName, { + runtime: lambda.Runtime.JAVA_21, + architecture: lambda.Architecture.ARM_64, + handler: 'example.Handler::handleRequest', + code: lambda.Code.fromAsset('./lambda/otlp-java/target/function.jar'), + functionName: javaFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 512, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: javaFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: javaFunctionName, + }, + logGroup: createLogGroup(this, javaFunctionName) + }); + + javaFunction.addToRolePolicy(defaultDatadogSecretPolicy); + javaFunction.addLayers(getExtensionLayer(this)); + } +} diff --git a/integration-tests/lib/stacks/otlp-node-stack.ts b/integration-tests/lib/stacks/otlp-node-stack.ts new file mode 100644 index 000000000..abbba86c1 --- /dev/null +++ b/integration-tests/lib/stacks/otlp-node-stack.ts @@ -0,0 +1,33 @@ +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { createLogGroup, defaultDatadogEnvVariables, defaultDatadogSecretPolicy, getExtensionLayer } from '../util'; + +export class OtlpNodeStack extends cdk.Stack { + 
constructor(scope: Construct, id: string, props: cdk.StackProps) { + super(scope, id, props); + + const nodeFunctionName = `${id}-lambda`; + const nodeFunction = new lambda.Function(this, nodeFunctionName, { + runtime: lambda.Runtime.NODEJS_20_X, + architecture: lambda.Architecture.ARM_64, + handler: 'index.handler', + code: lambda.Code.fromAsset('./lambda/otlp-node'), + functionName: nodeFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: nodeFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: nodeFunctionName, + }, + logGroup: createLogGroup(this, nodeFunctionName) + }); + + nodeFunction.addToRolePolicy(defaultDatadogSecretPolicy); + nodeFunction.addLayers(getExtensionLayer(this)); + } +} diff --git a/integration-tests/lib/stacks/otlp-python-stack.ts b/integration-tests/lib/stacks/otlp-python-stack.ts new file mode 100644 index 000000000..cdb3e7c21 --- /dev/null +++ b/integration-tests/lib/stacks/otlp-python-stack.ts @@ -0,0 +1,34 @@ +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { createLogGroup, defaultDatadogEnvVariables, defaultDatadogSecretPolicy, getExtensionLayer } from '../util'; + +export class OtlpPythonStack extends cdk.Stack { + constructor(scope: Construct, id: string, props: cdk.StackProps) { + super(scope, id, props); + + const pythonFunctionName = `${id}-lambda`; + + const pythonFunction = new lambda.Function(this, pythonFunctionName, { + runtime: lambda.Runtime.PYTHON_3_12, + architecture: lambda.Architecture.ARM_64, + handler: 'lambda_function.handler', + code: lambda.Code.fromAsset('./lambda/otlp-python/package'), + functionName: pythonFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + 
...defaultDatadogEnvVariables, + DD_SERVICE: pythonFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: pythonFunctionName, + }, + logGroup: createLogGroup(this, pythonFunctionName) + }); + + pythonFunction.addToRolePolicy(defaultDatadogSecretPolicy); + pythonFunction.addLayers(getExtensionLayer(this)); + } +} diff --git a/integration-tests/lib/stacks/otlp-stack.ts b/integration-tests/lib/stacks/otlp-stack.ts new file mode 100644 index 000000000..d18961b21 --- /dev/null +++ b/integration-tests/lib/stacks/otlp-stack.ts @@ -0,0 +1,110 @@ +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { + createLogGroup, + defaultDatadogEnvVariables, + defaultDatadogSecretPolicy, + getExtensionLayer +} from '../util'; + +export class OtlpStack extends cdk.Stack { + constructor(scope: Construct, id: string, props: cdk.StackProps) { + super(scope, id, props); + + // Get extension layer once for the entire stack + const extensionLayer = getExtensionLayer(this); + + // Node.js Lambda + const nodeFunctionName = `${id}-node-lambda`; + const nodeFunction = new lambda.Function(this, nodeFunctionName, { + runtime: lambda.Runtime.NODEJS_20_X, + architecture: lambda.Architecture.ARM_64, + handler: 'index.handler', + code: lambda.Code.fromAsset('./lambda/otlp-node'), + functionName: nodeFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: nodeFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: nodeFunctionName, + }, + logGroup: createLogGroup(this, nodeFunctionName) + }); + 
nodeFunction.addToRolePolicy(defaultDatadogSecretPolicy); + nodeFunction.addLayers(extensionLayer); + + // Python Lambda + const pythonFunctionName = `${id}-python-lambda`; + const pythonFunction = new lambda.Function(this, pythonFunctionName, { + runtime: lambda.Runtime.PYTHON_3_12, + architecture: lambda.Architecture.ARM_64, + handler: 'lambda_function.handler', + code: lambda.Code.fromAsset('./lambda/otlp-python/package'), + functionName: pythonFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 256, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: pythonFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: pythonFunctionName, + }, + logGroup: createLogGroup(this, pythonFunctionName) + }); + pythonFunction.addToRolePolicy(defaultDatadogSecretPolicy); + pythonFunction.addLayers(extensionLayer); + + // Java Lambda + const javaFunctionName = `${id}-java-lambda`; + const javaFunction = new lambda.Function(this, javaFunctionName, { + runtime: lambda.Runtime.JAVA_21, + architecture: lambda.Architecture.ARM_64, + handler: 'example.Handler::handleRequest', + code: lambda.Code.fromAsset('./lambda/otlp-java/target/function.jar'), + functionName: javaFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 512, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: javaFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: javaFunctionName, + }, + logGroup: createLogGroup(this, javaFunctionName) + }); + javaFunction.addToRolePolicy(defaultDatadogSecretPolicy); + javaFunction.addLayers(extensionLayer); + + // .NET Lambda + const dotnetFunctionName = `${id}-dotnet-lambda`; + const dotnetFunction = new lambda.Function(this, 
dotnetFunctionName, { + runtime: lambda.Runtime.DOTNET_8, + architecture: lambda.Architecture.ARM_64, + handler: 'Function::Function.Handler::FunctionHandler', + code: lambda.Code.fromAsset('./lambda/otlp-dotnet/bin/function.zip'), + functionName: dotnetFunctionName, + timeout: cdk.Duration.seconds(30), + memorySize: 512, + environment: { + ...defaultDatadogEnvVariables, + DD_SERVICE: dotnetFunctionName, + DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT: 'localhost:4318', + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://localhost:4318', + OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', + OTEL_SERVICE_NAME: dotnetFunctionName, + }, + logGroup: createLogGroup(this, dotnetFunctionName) + }); + dotnetFunction.addToRolePolicy(defaultDatadogSecretPolicy); + dotnetFunction.addLayers(extensionLayer); + } +} diff --git a/integration-tests/scripts/build-dotnet.sh b/integration-tests/scripts/build-dotnet.sh new file mode 100755 index 000000000..e100f0a68 --- /dev/null +++ b/integration-tests/scripts/build-dotnet.sh @@ -0,0 +1,132 @@ +#!/bin/bash +set -e + +# Reusable script to build .NET Lambda functions +# Usage: +# ./build-dotnet.sh # Build all .NET Lambda functions +# ./build-dotnet.sh # Build specific Lambda function +# Example: ./build-dotnet.sh lambda/otlp-dotnet + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +LAMBDA_BASE_DIR="$SCRIPT_DIR/../lambda" + +# Function to build a single .NET Lambda +build_dotnet_lambda() { + local LAMBDA_DIR="$1" + local FUNCTION_NAME=$(basename "$LAMBDA_DIR") + + if [ ! -d "$LAMBDA_DIR" ]; then + echo "Error: Directory not found: $LAMBDA_DIR" + return 1 + fi + + # Check for .csproj file + if ! ls "$LAMBDA_DIR"/*.csproj 1> /dev/null 2>&1; then + echo "Error: No .csproj file found in $LAMBDA_DIR" + echo "This script is for .NET projects only" + return 1 + fi + + echo "Building .NET Lambda: $FUNCTION_NAME" + + # Check if Docker is available + if ! 
command -v docker &> /dev/null; then + echo "Error: Docker is not installed or not in PATH" + echo "Please install Docker: https://docs.docker.com/get-docker/" + return 1 + fi + + # Clean previous build (idempotent) + rm -rf "$LAMBDA_DIR/bin" "$LAMBDA_DIR/obj" + + # Determine NuGet cache directory + if [ -n "$CI" ]; then + # In CI: use local cache directory + NUGET_CACHE_DIR="$SCRIPT_DIR/../.cache/nuget" + mkdir -p "$NUGET_CACHE_DIR" + else + # Local development: use host's NuGet cache + NUGET_CACHE_DIR="$HOME/.nuget" + fi + + # Build and package with Docker using ARM64 platform + # Mount NuGet cache for faster package downloads + docker run --rm --platform linux/arm64 \ + -v "$LAMBDA_DIR":/workspace \ + -v "$NUGET_CACHE_DIR":/root/.nuget \ + -w /workspace \ + mcr.microsoft.com/dotnet/sdk:8.0-alpine \ + sh -c "apk add --no-cache zip && \ + dotnet tool install -g Amazon.Lambda.Tools || true && \ + export PATH=\"\$PATH:/root/.dotnet/tools\" && \ + dotnet lambda package -o bin/function.zip --function-architecture arm64" + + if [ -f "$LAMBDA_DIR/bin/function.zip" ]; then + echo "✓ Build complete: $LAMBDA_DIR/bin/function.zip" + ls -lh "$LAMBDA_DIR/bin/function.zip" + return 0 + else + echo "✗ Build failed: bin/function.zip not found" + return 1 + fi +} + +# Main logic: build all or build one +if [ -z "$1" ]; then + # No argument: build all .NET Lambda functions + echo "==========================================" + echo "Building all .NET Lambda functions" + echo "==========================================" + echo "" + + FOUND_DOTNET=0 + FAILED_BUILDS=() + + for LAMBDA_PATH in "$LAMBDA_BASE_DIR"/*; do + if [ ! 
-d "$LAMBDA_PATH" ]; then + continue + fi + + FUNCTION_NAME=$(basename "$LAMBDA_PATH") + + # Check if this is a .NET function (contains "dotnet" in name) + if [[ "$FUNCTION_NAME" == *"dotnet"* ]]; then + FOUND_DOTNET=1 + echo "----------------------------------------" + if build_dotnet_lambda "$LAMBDA_PATH"; then + echo "✓ $FUNCTION_NAME succeeded" + else + echo "✗ $FUNCTION_NAME failed" + FAILED_BUILDS+=("$FUNCTION_NAME") + fi + echo "" + fi + done + + if [ $FOUND_DOTNET -eq 0 ]; then + echo "No .NET Lambda functions found (looking for directories with 'dotnet' in name)" + exit 1 + fi + + # Summary + if [ ${#FAILED_BUILDS[@]} -eq 0 ]; then + echo "✓ All .NET Lambda builds completed successfully!" + exit 0 + else + echo "✗ ${#FAILED_BUILDS[@]} .NET Lambda build(s) failed:" + for failed in "${FAILED_BUILDS[@]}"; do + echo " - $failed" + done + exit 1 + fi +else + # Argument provided: build specific Lambda function + LAMBDA_DIR="$1" + + # Convert to absolute path if relative + if [[ "$LAMBDA_DIR" != /* ]]; then + LAMBDA_DIR="$(cd "$SCRIPT_DIR/.." && pwd)/$LAMBDA_DIR" + fi + + build_dotnet_lambda "$LAMBDA_DIR" +fi diff --git a/integration-tests/scripts/build-java.sh b/integration-tests/scripts/build-java.sh new file mode 100755 index 000000000..acd723726 --- /dev/null +++ b/integration-tests/scripts/build-java.sh @@ -0,0 +1,131 @@ +#!/bin/bash +set -e + +# Reusable script to build Java Lambda functions with Maven +# Usage: +# ./build-java.sh # Build all Java Lambda functions +# ./build-java.sh # Build specific Lambda function +# Example: ./build-java.sh lambda/otlp-java + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +LAMBDA_BASE_DIR="$SCRIPT_DIR/../lambda" + +# Function to build a single Java Lambda +build_java_lambda() { + local LAMBDA_DIR="$1" + local FUNCTION_NAME=$(basename "$LAMBDA_DIR") + + if [ ! -d "$LAMBDA_DIR" ]; then + echo "Error: Directory not found: $LAMBDA_DIR" + return 1 + fi + + if [ ! 
-f "$LAMBDA_DIR/pom.xml" ]; then + echo "Error: pom.xml not found in $LAMBDA_DIR" + echo "This script is for Java Maven projects only" + return 1 + fi + + echo "Building Java Lambda: $FUNCTION_NAME" + + # Check if Docker is available + if ! command -v docker &> /dev/null; then + echo "Error: Docker is not installed or not in PATH" + echo "Please install Docker: https://docs.docker.com/get-docker/" + return 1 + fi + + # Clean previous build (idempotent) + rm -rf "$LAMBDA_DIR/target" + + # Determine Maven cache directory + if [ -n "$CI" ]; then + # In CI: use local cache directory + MAVEN_CACHE_DIR="$SCRIPT_DIR/../.cache/maven" + mkdir -p "$MAVEN_CACHE_DIR" + else + # Local development: use host's Maven cache + MAVEN_CACHE_DIR="$HOME/.m2" + fi + + # Determine Maven image (use AWS ECR Public to avoid Docker Hub rate limits) + MAVEN_IMAGE="public.ecr.aws/docker/library/maven:3.9-eclipse-temurin-21-alpine" + + # Build with Docker using ARM64 platform + # Mount Maven cache for faster dependency downloads + docker run --rm --platform linux/arm64 \ + -v "$LAMBDA_DIR":/workspace \ + -v "$MAVEN_CACHE_DIR":/root/.m2 \ + -w /workspace \ + "$MAVEN_IMAGE" \ + mvn clean package + + if [ -f "$LAMBDA_DIR/target/function.jar" ]; then + echo "✓ Build complete: $LAMBDA_DIR/target/function.jar" + ls -lh "$LAMBDA_DIR/target/function.jar" + return 0 + else + echo "✗ Build failed: target/function.jar not found" + return 1 + fi +} + +# Main logic: build all or build one +if [ -z "$1" ]; then + # No argument: build all Java Lambda functions + echo "==========================================" + echo "Building all Java Lambda functions" + echo "==========================================" + echo "" + + FOUND_JAVA=0 + FAILED_BUILDS=() + + for LAMBDA_PATH in "$LAMBDA_BASE_DIR"/*; do + if [ ! 
-d "$LAMBDA_PATH" ]; then + continue + fi + + FUNCTION_NAME=$(basename "$LAMBDA_PATH") + + # Check if this is a Java function (contains "java" in name) + if [[ "$FUNCTION_NAME" == *"java"* ]]; then + FOUND_JAVA=1 + echo "----------------------------------------" + if build_java_lambda "$LAMBDA_PATH"; then + echo "✓ $FUNCTION_NAME succeeded" + else + echo "✗ $FUNCTION_NAME failed" + FAILED_BUILDS+=("$FUNCTION_NAME") + fi + echo "" + fi + done + + if [ $FOUND_JAVA -eq 0 ]; then + echo "No Java Lambda functions found (looking for directories with 'java' in name)" + exit 1 + fi + + # Summary + if [ ${#FAILED_BUILDS[@]} -eq 0 ]; then + echo "✓ All Java Lambda builds completed successfully!" + exit 0 + else + echo "✗ ${#FAILED_BUILDS[@]} Java Lambda build(s) failed:" + for failed in "${FAILED_BUILDS[@]}"; do + echo " - $failed" + done + exit 1 + fi +else + # Argument provided: build specific Lambda function + LAMBDA_DIR="$1" + + # Convert to absolute path if relative + if [[ "$LAMBDA_DIR" != /* ]]; then + LAMBDA_DIR="$(cd "$SCRIPT_DIR/.." && pwd)/$LAMBDA_DIR" + fi + + build_java_lambda "$LAMBDA_DIR" +fi diff --git a/integration-tests/scripts/build-node.sh b/integration-tests/scripts/build-node.sh new file mode 100755 index 000000000..72dc13145 --- /dev/null +++ b/integration-tests/scripts/build-node.sh @@ -0,0 +1,156 @@ +#!/bin/bash +set -e + +# Reusable script to build Node.js Lambda functions +# Usage: +# ./build-node.sh # Build all Node.js Lambda functions +# ./build-node.sh # Build specific Lambda function +# Example: ./build-node.sh lambda/otlp-node + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +LAMBDA_BASE_DIR="$SCRIPT_DIR/../lambda" + +# Function to build a single Node.js Lambda +build_node_lambda() { + local LAMBDA_DIR="$1" + local FUNCTION_NAME=$(basename "$LAMBDA_DIR") + + if [ ! 
-d "$LAMBDA_DIR" ]; then + echo "Error: Directory not found: $LAMBDA_DIR" + return 1 + fi + + echo "Building Node.js Lambda: $FUNCTION_NAME" + + # Check if package.json exists + if [ ! -f "$LAMBDA_DIR/package.json" ]; then + echo "ℹ No package.json found - skipping build (no dependencies)" + return 0 + fi + + # Check if package.json has dependencies + if ! grep -q '"dependencies"' "$LAMBDA_DIR/package.json"; then + echo "ℹ No dependencies in package.json - skipping build" + return 0 + fi + + # Check if dependencies object is empty + if grep -A1 '"dependencies"' "$LAMBDA_DIR/package.json" | grep -q '{}'; then + echo "ℹ Empty dependencies in package.json - skipping build" + return 0 + fi + + echo "Found dependencies in package.json - installing modules" + + # Check if Docker is available + if ! command -v docker &> /dev/null; then + echo "Error: Docker is not installed or not in PATH" + echo "Please install Docker: https://docs.docker.com/get-docker/" + return 1 + fi + + # Clean previous build (idempotent) + rm -rf "$LAMBDA_DIR/node_modules" + + # Determine npm cache directory + if [ -n "$CI" ]; then + # In CI: use local cache directory + NPM_CACHE_DIR="$SCRIPT_DIR/../.cache/npm" + mkdir -p "$NPM_CACHE_DIR" + else + # Local development: use host's npm cache + NPM_CACHE_DIR="$HOME/.npm" + fi + + # Determine Node image (use AWS ECR Public to avoid Docker Hub rate limits) + NODE_IMAGE="public.ecr.aws/docker/library/node:20-alpine" + + # Install dependencies with Docker using ARM64 platform + # Mount npm cache for faster package downloads + docker run --rm --platform linux/arm64 \ + -v "$LAMBDA_DIR":/workspace \ + -v "$NPM_CACHE_DIR":/root/.npm \ + -w /workspace \ + "$NODE_IMAGE" \ + npm ci --production + + if [ -d "$LAMBDA_DIR/node_modules" ] && [ "$(ls -A "$LAMBDA_DIR/node_modules")" ]; then + echo "✓ Build complete: $LAMBDA_DIR/node_modules/" + echo "Installed packages:" + ls -d "$LAMBDA_DIR/node_modules"/*/ | head -10 + return 0 + else + echo "✗ Build failed: 
node_modules/ directory is empty" + return 1 + fi +} + +# Main logic: build all or build one +if [ -z "$1" ]; then + # No argument: build all Node.js Lambda functions + echo "==========================================" + echo "Building all Node.js Lambda functions" + echo "==========================================" + echo "" + + FOUND_NODE=0 + BUILT_COUNT=0 + SKIPPED_COUNT=0 + FAILED_BUILDS=() + + for LAMBDA_PATH in "$LAMBDA_BASE_DIR"/*; do + if [ ! -d "$LAMBDA_PATH" ]; then + continue + fi + + FUNCTION_NAME=$(basename "$LAMBDA_PATH") + + # Check if this is a Node.js function (contains "node" in name) + if [[ "$FUNCTION_NAME" == *"node"* ]]; then + FOUND_NODE=1 + echo "----------------------------------------" + if build_node_lambda "$LAMBDA_PATH"; then + # Check if it was actually built or skipped + if [ -d "$LAMBDA_PATH/node_modules" ]; then + echo "✓ $FUNCTION_NAME built successfully" + BUILT_COUNT=$((BUILT_COUNT + 1)) + else + echo "ℹ $FUNCTION_NAME skipped (no dependencies)" + SKIPPED_COUNT=$((SKIPPED_COUNT + 1)) + fi + else + echo "✗ $FUNCTION_NAME failed" + FAILED_BUILDS+=("$FUNCTION_NAME") + fi + echo "" + fi + done + + if [ $FOUND_NODE -eq 0 ]; then + echo "No Node.js Lambda functions found (looking for directories with 'node' in name)" + exit 1 + fi + + # Summary + echo "Built: $BUILT_COUNT, Skipped: $SKIPPED_COUNT" + if [ ${#FAILED_BUILDS[@]} -eq 0 ]; then + echo "✓ All Node.js Lambda builds completed successfully!" + exit 0 + else + echo "✗ ${#FAILED_BUILDS[@]} Node.js Lambda build(s) failed:" + for failed in "${FAILED_BUILDS[@]}"; do + echo " - $failed" + done + exit 1 + fi +else + # Argument provided: build specific Lambda function + LAMBDA_DIR="$1" + + # Convert to absolute path if relative + if [[ "$LAMBDA_DIR" != /* ]]; then + LAMBDA_DIR="$(cd "$SCRIPT_DIR/.." 
&& pwd)/$LAMBDA_DIR" + fi + + build_node_lambda "$LAMBDA_DIR" +fi diff --git a/integration-tests/scripts/build-python.sh b/integration-tests/scripts/build-python.sh new file mode 100755 index 000000000..83d4b9530 --- /dev/null +++ b/integration-tests/scripts/build-python.sh @@ -0,0 +1,152 @@ +#!/bin/bash +set -e + +# Reusable script to build Python Lambda functions +# Usage: +# ./build-python.sh # Build all Python Lambda functions +# ./build-python.sh # Build specific Lambda function +# Example: ./build-python.sh lambda/otlp-python + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +LAMBDA_BASE_DIR="$SCRIPT_DIR/../lambda" + +# Function to build a single Python Lambda +build_python_lambda() { + local LAMBDA_DIR="$1" + local FUNCTION_NAME=$(basename "$LAMBDA_DIR") + + if [ ! -d "$LAMBDA_DIR" ]; then + echo "Error: Directory not found: $LAMBDA_DIR" + return 1 + fi + + echo "Building Python Lambda: $FUNCTION_NAME" + + # Check if requirements.txt exists and has actual dependencies + if [ ! -f "$LAMBDA_DIR/requirements.txt" ]; then + echo "ℹ No requirements.txt found - skipping build (no dependencies)" + return 0 + fi + + # Check if requirements.txt has any non-comment, non-empty lines + if ! grep -v '^#' "$LAMBDA_DIR/requirements.txt" | grep -v '^$' | grep -q .; then + echo "ℹ requirements.txt is empty or has only comments - skipping build" + return 0 + fi + + echo "Found dependencies in requirements.txt - building package" + + # Check if Docker is available + if ! 
command -v docker &> /dev/null; then + echo "Error: Docker is not installed or not in PATH" + echo "Please install Docker: https://docs.docker.com/get-docker/" + return 1 + fi + + # Clean previous build (idempotent) + rm -rf "$LAMBDA_DIR/package" + mkdir -p "$LAMBDA_DIR/package" + + # Determine pip cache directory + if [ -n "$CI" ]; then + # In CI: use local cache directory + PIP_CACHE_DIR="$SCRIPT_DIR/../.cache/pip" + mkdir -p "$PIP_CACHE_DIR" + else + # Local development: use host's pip cache + PIP_CACHE_DIR="$HOME/.cache/pip" + fi + + # Install dependencies with Docker using ARM64 platform + # Use the same image that CDK would use for consistency + # Mount pip cache for faster package downloads + docker run --rm --platform linux/arm64 \ + -v "$LAMBDA_DIR":/workspace \ + -v "$PIP_CACHE_DIR":/root/.cache/pip \ + -w /workspace \ + public.ecr.aws/sam/build-python3.12 \ + pip install -r requirements.txt -t package/ + + # Copy source files to package directory + cp -r "$LAMBDA_DIR"/*.py "$LAMBDA_DIR/package/" 2>/dev/null || true + + if [ -d "$LAMBDA_DIR/package" ] && [ "$(ls -A "$LAMBDA_DIR/package")" ]; then + echo "✓ Build complete: $LAMBDA_DIR/package/" + echo "Package contents:" + ls -lh "$LAMBDA_DIR/package" | head -10 + return 0 + else + echo "✗ Build failed: package/ directory is empty" + return 1 + fi +} + +# Main logic: build all or build one +if [ -z "$1" ]; then + # No argument: build all Python Lambda functions + echo "==========================================" + echo "Building all Python Lambda functions" + echo "==========================================" + echo "" + + FOUND_PYTHON=0 + BUILT_COUNT=0 + SKIPPED_COUNT=0 + FAILED_BUILDS=() + + for LAMBDA_PATH in "$LAMBDA_BASE_DIR"/*; do + if [ ! 
-d "$LAMBDA_PATH" ]; then + continue + fi + + FUNCTION_NAME=$(basename "$LAMBDA_PATH") + + # Check if this is a Python function (contains "python" in name) + if [[ "$FUNCTION_NAME" == *"python"* ]]; then + FOUND_PYTHON=1 + echo "----------------------------------------" + if build_python_lambda "$LAMBDA_PATH"; then + # Check if it was actually built or skipped + if [ -d "$LAMBDA_PATH/package" ]; then + echo "✓ $FUNCTION_NAME built successfully" + BUILT_COUNT=$((BUILT_COUNT + 1)) + else + echo "ℹ $FUNCTION_NAME skipped (no dependencies)" + SKIPPED_COUNT=$((SKIPPED_COUNT + 1)) + fi + else + echo "✗ $FUNCTION_NAME failed" + FAILED_BUILDS+=("$FUNCTION_NAME") + fi + echo "" + fi + done + + if [ $FOUND_PYTHON -eq 0 ]; then + echo "No Python Lambda functions found (looking for directories with 'python' in name)" + exit 1 + fi + + # Summary + echo "Built: $BUILT_COUNT, Skipped: $SKIPPED_COUNT" + if [ ${#FAILED_BUILDS[@]} -eq 0 ]; then + echo "✓ All Python Lambda builds completed successfully!" + exit 0 + else + echo "✗ ${#FAILED_BUILDS[@]} Python Lambda build(s) failed:" + for failed in "${FAILED_BUILDS[@]}"; do + echo " - $failed" + done + exit 1 + fi +else + # Argument provided: build specific Lambda function + LAMBDA_DIR="$1" + + # Convert to absolute path if relative + if [[ "$LAMBDA_DIR" != /* ]]; then + LAMBDA_DIR="$(cd "$SCRIPT_DIR/.." 
&& pwd)/$LAMBDA_DIR" + fi + + build_python_lambda "$LAMBDA_DIR" +fi diff --git a/integration-tests/scripts/local_deploy.sh b/integration-tests/scripts/local_deploy.sh index 1bd01cb0f..3c4d45891 100755 --- a/integration-tests/scripts/local_deploy.sh +++ b/integration-tests/scripts/local_deploy.sh @@ -33,6 +33,36 @@ echo "Using extension layer: $EXTENSION_LAYER_ARN" FULL_STACK_NAME="integ-$IDENTIFIER-$STACK_NAME" echo "Deploying stack: $FULL_STACK_NAME" -# Build and deploy +# Get the directory of this script +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Build Lambda functions based on stack name +echo "" +echo "Building Lambda functions for $STACK_NAME..." +case "$STACK_NAME" in + *java*) + "$SCRIPT_DIR/build-java.sh" lambda/base-java + "$SCRIPT_DIR/build-java.sh" lambda/otlp-java + ;; + *dotnet*) + "$SCRIPT_DIR/build-dotnet.sh" lambda/base-dotnet + "$SCRIPT_DIR/build-dotnet.sh" lambda/otlp-dotnet + ;; + *python*) + "$SCRIPT_DIR/build-python.sh" lambda/base-python + "$SCRIPT_DIR/build-python.sh" lambda/otlp-python + ;; + *node*) + "$SCRIPT_DIR/build-node.sh" lambda/base-node + "$SCRIPT_DIR/build-node.sh" lambda/otlp-node + ;; + *) + echo "Warning: Unknown stack type, skipping Lambda build" + ;; +esac + +echo "" +echo "Building CDK TypeScript and deploying..." 
+# Build CDK TypeScript and deploy npm run build && aws-vault exec sso-serverless-sandbox-account-admin -- cdk deploy "$FULL_STACK_NAME" --require-approval never diff --git a/integration-tests/tests/base-dotnet.test.ts b/integration-tests/tests/base-dotnet.test.ts deleted file mode 100644 index 9791c8d45..000000000 --- a/integration-tests/tests/base-dotnet.test.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { invokeLambdaAndGetDatadogData, LambdaInvocationDatadogData } from './utils/util'; -import { getIdentifier } from './utils/config'; - -describe('Base .NET Lambda Integration Test', () => { - const DOTNET_FUNCTION_NAME = `integ-${getIdentifier()}-base-dotnet-lambda`; - let result: LambdaInvocationDatadogData; - - beforeAll(async () => { - console.log(`Invoking Lambda function: ${DOTNET_FUNCTION_NAME}`); - result = await invokeLambdaAndGetDatadogData(DOTNET_FUNCTION_NAME, {}, true); - }, 700000); // 11.6 minute timeout - - it('should invoke .NET Lambda successfully', () => { - expect(result.statusCode).toBe(200); - }); - - it('should have "Hello world!" 
log message', () => { - const helloWorldLog = result.logs?.find((log: any) => - log.message.includes('Hello world!') - ); - expect(helloWorldLog).toBeDefined(); - }); - - it('should send one trace to Datadog', () => { - expect(result.traces?.length).toEqual(1); - }); - - it('should have aws.lambda span with correct properties', () => { - const trace = result.traces![0]; - const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); - expect(awsLambdaSpan).toBeDefined(); - expect(awsLambdaSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda', - custom: { - cold_start: 'true' - } - } - }); - }); - - it('should have aws.lambda.cold_start span', () => { - const trace = result.traces![0]; - const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); - expect(awsLambdaColdStartSpan).toBeDefined(); - expect(awsLambdaColdStartSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda.cold_start', - } - }); - }); -}); diff --git a/integration-tests/tests/base-java.test.ts b/integration-tests/tests/base-java.test.ts deleted file mode 100644 index bf3bdbaa6..000000000 --- a/integration-tests/tests/base-java.test.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { invokeLambdaAndGetDatadogData, LambdaInvocationDatadogData } from './utils/util'; -import { getIdentifier } from './utils/config'; - -describe('Base Java Lambda Integration Test', () => { - const JAVA_FUNCTION_NAME = `integ-${getIdentifier()}-base-java-lambda`; - let result: LambdaInvocationDatadogData; - - beforeAll(async () => { - console.log(`Invoking Lambda function: ${JAVA_FUNCTION_NAME}`); - result = await invokeLambdaAndGetDatadogData(JAVA_FUNCTION_NAME, {}, true); - }, 700000); // 11.6 minute timeout - - it('should invoke Java Lambda successfully', () => { - expect(result.statusCode).toBe(200); - }); - - it('should have "Hello world!" 
log message', () => { - const helloWorldLog = result.logs?.find((log: any) => - log.message.includes('Hello world!') - ); - expect(helloWorldLog).toBeDefined(); - }); - - it('should send one trace to Datadog', () => { - expect(result.traces?.length).toEqual(1); - }); - - it('should have aws.lambda span with correct properties', () => { - const trace = result.traces![0]; - const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); - expect(awsLambdaSpan).toBeDefined(); - expect(awsLambdaSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda', - custom: { - cold_start: 'true' - } - } - }); - }); - - it('should have aws.lambda.cold_start span', () => { - const trace = result.traces![0]; - const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); - expect(awsLambdaColdStartSpan).toBeDefined(); - expect(awsLambdaColdStartSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda.cold_start', - } - }); - }); -}); diff --git a/integration-tests/tests/base-node.test.ts b/integration-tests/tests/base-node.test.ts deleted file mode 100644 index 441bb97ae..000000000 --- a/integration-tests/tests/base-node.test.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { invokeLambdaAndGetDatadogData, LambdaInvocationDatadogData } from './utils/util'; -import { getIdentifier } from './utils/config'; - -describe('Base Node Lambda Integration Test', () => { - const NODE_FUNCTION_NAME = `integ-${getIdentifier()}-base-node-lambda`; - let result: LambdaInvocationDatadogData; - - beforeAll(async () => { - console.log(`Invoking Lambda function: ${NODE_FUNCTION_NAME}`); - result = await invokeLambdaAndGetDatadogData(NODE_FUNCTION_NAME, {}, true); - }, 700000); // 11.6 minute timeout - - it('should invoke Node.js Lambda successfully', () => { - expect(result.statusCode).toBe(200); - }); - - it('should have "Hello world!" 
log message', () => { - const helloWorldLog = result.logs?.find((log: any) => - log.message.includes('Hello world!') - ); - expect(helloWorldLog).toBeDefined(); - }); - - it('should send one trace to Datadog', () => { - expect(result.traces?.length).toEqual(1); - }); - - it('should have aws.lambda span with correct properties', () => { - const trace = result.traces![0]; - const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); - expect(awsLambdaSpan).toBeDefined(); - expect(awsLambdaSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda', - custom: { - cold_start: 'true' - } - } - }); - }); - - it('should have aws.lambda.cold_start span', () => { - const trace = result.traces![0]; - const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); - expect(awsLambdaColdStartSpan).toBeDefined(); - expect(awsLambdaColdStartSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda.cold_start', - } - }); - }); - - it('should have aws.lambda.load span', () => { - const trace = result.traces![0]; - const awsLambdaLoadSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.load'); - expect(awsLambdaLoadSpan).toBeDefined(); - expect(awsLambdaLoadSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda.load', - } - }); - }); -}); diff --git a/integration-tests/tests/base-python.test.ts b/integration-tests/tests/base-python.test.ts deleted file mode 100644 index 57b2870d2..000000000 --- a/integration-tests/tests/base-python.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { invokeLambdaAndGetDatadogData, LambdaInvocationDatadogData } from './utils/util'; -import { getIdentifier } from './utils/config'; - -describe('Base Python Lambda Integration Test', () => { - const PYTHON_FUNCTION_NAME = `integ-${getIdentifier()}-base-python-lambda`; - let result: LambdaInvocationDatadogData; - - beforeAll(async () => { - 
console.log(`Invoking Lambda function: ${PYTHON_FUNCTION_NAME}`); - result = await invokeLambdaAndGetDatadogData(PYTHON_FUNCTION_NAME, {}, true); - }, 700000); // 11.6 minute timeout - - it('should invoke Python Lambda successfully', () => { - expect(result.statusCode).toBe(200); - }); - - it('should have "Hello world!" log message', () => { - const helloWorldLog = result.logs?.find((log: any) => - log.message.includes('Hello world!') - ); - expect(helloWorldLog).toBeDefined(); - }); - - it('should send one trace to Datadog', () => { - expect(result.traces?.length).toEqual(1); - }); - - it('should have aws.lambda span with correct properties', () => { - const trace = result.traces![0]; - const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); - expect(awsLambdaSpan).toBeDefined(); - expect(awsLambdaSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda', - custom: { - cold_start: 'true' - } - } - }); - }); - - // TODO: These spans are being created but not with the same traceId as the 'aws.lambda' span - // Need to investigate why this is happening and fix it. - it.failing('[failing] should have aws.lambda.cold_start span', () => { - const trace = result.traces![0]; - const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); - expect(awsLambdaColdStartSpan).toBeDefined(); - expect(awsLambdaColdStartSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda.cold_start', - } - }); - }); - - // TODO: These spans are being created but not with the same traceId as the 'aws.lambda' span - // Need to investigate why this is happening and fix it. 
- it.failing('[failing] should have aws.lambda.load span', () => { - const trace = result.traces![0]; - const awsLambdaLoadSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.load'); - expect(awsLambdaLoadSpan).toBeDefined(); - expect(awsLambdaLoadSpan).toMatchObject({ - attributes: { - operation_name: 'aws.lambda.load', - } - }); - }); -}); diff --git a/integration-tests/tests/base.test.ts b/integration-tests/tests/base.test.ts new file mode 100644 index 000000000..1cca995b1 --- /dev/null +++ b/integration-tests/tests/base.test.ts @@ -0,0 +1,229 @@ +import { invokeLambdaAndGetDatadogData, LambdaInvocationDatadogData } from './utils/util'; +import { getIdentifier } from './utils/config'; + +describe('Base Integration Tests', () => { + const results: Record = {}; + + beforeAll(async () => { + const identifier = getIdentifier(); + const functions = { + node: `integ-${identifier}-base-node-lambda`, + python: `integ-${identifier}-base-python-lambda`, + java: `integ-${identifier}-base-java-lambda`, + dotnet: `integ-${identifier}-base-dotnet-lambda`, + }; + + console.log('Invoking all base Lambda functions in parallel...'); + + // Invoke all Lambdas in parallel + const invocationResults = await Promise.all([ + invokeLambdaAndGetDatadogData(functions.node, {}, true), + invokeLambdaAndGetDatadogData(functions.python, {}, true), + invokeLambdaAndGetDatadogData(functions.java, {}, true), + invokeLambdaAndGetDatadogData(functions.dotnet, {}, true), + ]); + + // Store results + results.node = invocationResults[0]; + results.python = invocationResults[1]; + results.java = invocationResults[2]; + results.dotnet = invocationResults[3]; + + console.log('All base Lambda invocations and data fetching completed'); + }, 700000); // 11.6 minute timeout + + describe('Node.js Runtime', () => { + it('should invoke Node.js Lambda successfully', () => { + expect(results.node.statusCode).toBe(200); + }); + + it('should have "Hello world!" 
log message', () => { + const helloWorldLog = results.node.logs?.find((log: any) => + log.message.includes('Hello world!') + ); + expect(helloWorldLog).toBeDefined(); + }); + + it('should send one trace to Datadog', () => { + expect(results.node.traces?.length).toEqual(1); + }); + + it('should have aws.lambda span with correct properties', () => { + const trace = results.node.traces![0]; + const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); + expect(awsLambdaSpan).toBeDefined(); + expect(awsLambdaSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda', + custom: { + cold_start: 'true' + } + } + }); + }); + + it('should have aws.lambda.cold_start span', () => { + const trace = results.node.traces![0]; + const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); + expect(awsLambdaColdStartSpan).toBeDefined(); + expect(awsLambdaColdStartSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda.cold_start', + } + }); + }); + + it('should have aws.lambda.load span', () => { + const trace = results.node.traces![0]; + const awsLambdaLoadSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.load'); + expect(awsLambdaLoadSpan).toBeDefined(); + expect(awsLambdaLoadSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda.load', + } + }); + }); + }); + + describe('Python Runtime', () => { + it('should invoke Python Lambda successfully', () => { + expect(results.python.statusCode).toBe(200); + }); + + it('should have "Hello world!" 
log message', () => { + const helloWorldLog = results.python.logs?.find((log: any) => + log.message.includes('Hello world!') + ); + expect(helloWorldLog).toBeDefined(); + }); + + it('should send one trace to Datadog', () => { + expect(results.python.traces?.length).toEqual(1); + }); + + it('should have aws.lambda span with correct properties', () => { + const trace = results.python.traces![0]; + const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); + expect(awsLambdaSpan).toBeDefined(); + expect(awsLambdaSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda', + custom: { + cold_start: 'true' + } + } + }); + }); + + // TODO: These spans are being created but not with the same traceId as the 'aws.lambda' span + // Need to investigate why this is happening and fix it. + it.failing('[failing] should have aws.lambda.cold_start span', () => { + const trace = results.python.traces![0]; + const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); + expect(awsLambdaColdStartSpan).toBeDefined(); + expect(awsLambdaColdStartSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda.cold_start', + } + }); + }); + + // TODO: These spans are being created but not with the same traceId as the 'aws.lambda' span + // Need to investigate why this is happening and fix it. + it.failing('[failing] should have aws.lambda.load span', () => { + const trace = results.python.traces![0]; + const awsLambdaLoadSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.load'); + expect(awsLambdaLoadSpan).toBeDefined(); + expect(awsLambdaLoadSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda.load', + } + }); + }); + }); + + describe('Java Runtime', () => { + it('should invoke Java Lambda successfully', () => { + expect(results.java.statusCode).toBe(200); + }); + + it('should have "Hello world!" 
log message', () => { + const helloWorldLog = results.java.logs?.find((log: any) => + log.message.includes('Hello world!') + ); + expect(helloWorldLog).toBeDefined(); + }); + + it('should send one trace to Datadog', () => { + expect(results.java.traces?.length).toEqual(1); + }); + + it('should have aws.lambda span with correct properties', () => { + const trace = results.java.traces![0]; + const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); + expect(awsLambdaSpan).toBeDefined(); + expect(awsLambdaSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda', + custom: { + cold_start: 'true' + } + } + }); + }); + + it('should have aws.lambda.cold_start span', () => { + const trace = results.java.traces![0]; + const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); + expect(awsLambdaColdStartSpan).toBeDefined(); + expect(awsLambdaColdStartSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda.cold_start', + } + }); + }); + }); + + describe('.NET Runtime', () => { + it('should invoke .NET Lambda successfully', () => { + expect(results.dotnet.statusCode).toBe(200); + }); + + it('should have "Hello world!" 
log message', () => { + const helloWorldLog = results.dotnet.logs?.find((log: any) => + log.message.includes('Hello world!') + ); + expect(helloWorldLog).toBeDefined(); + }); + + it('should send one trace to Datadog', () => { + expect(results.dotnet.traces?.length).toEqual(1); + }); + + it('should have aws.lambda span with correct properties', () => { + const trace = results.dotnet.traces![0]; + const awsLambdaSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda'); + expect(awsLambdaSpan).toBeDefined(); + expect(awsLambdaSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda', + custom: { + cold_start: 'true' + } + } + }); + }); + + it('should have aws.lambda.cold_start span', () => { + const trace = results.dotnet.traces![0]; + const awsLambdaColdStartSpan = trace.spans.find((span: any) => span.attributes.operation_name === 'aws.lambda.cold_start'); + expect(awsLambdaColdStartSpan).toBeDefined(); + expect(awsLambdaColdStartSpan).toMatchObject({ + attributes: { + operation_name: 'aws.lambda.cold_start', + } + }); + }); + + }); +}); diff --git a/integration-tests/tests/otlp.test.ts b/integration-tests/tests/otlp.test.ts new file mode 100644 index 000000000..f755cc9ce --- /dev/null +++ b/integration-tests/tests/otlp.test.ts @@ -0,0 +1,94 @@ +import { invokeLambdaAndGetDatadogData, LambdaInvocationDatadogData } from './utils/util'; +import { getIdentifier } from './utils/config'; + +describe('OTLP Integration Tests', () => { + const results: Record<string, LambdaInvocationDatadogData> = {}; + + beforeAll(async () => { + const identifier = getIdentifier(); + const functions = { + node: `integ-${identifier}-otlp-node-lambda`, + python: `integ-${identifier}-otlp-python-lambda`, + java: `integ-${identifier}-otlp-java-lambda`, + dotnet: `integ-${identifier}-otlp-dotnet-lambda`, + }; + + console.log('Invoking all OTLP Lambda functions in parallel...'); + + // Invoke all Lambdas in parallel + const invocationResults = await Promise.all([ + 
invokeLambdaAndGetDatadogData(functions.node, {}, true), + invokeLambdaAndGetDatadogData(functions.python, {}, true), + invokeLambdaAndGetDatadogData(functions.java, {}, true), + invokeLambdaAndGetDatadogData(functions.dotnet, {}, true), + ]); + + // Store results + results.node = invocationResults[0]; + results.python = invocationResults[1]; + results.java = invocationResults[2]; + results.dotnet = invocationResults[3]; + + console.log('All OTLP Lambda invocations and data fetching completed'); + }, 700000); // 11.6 minute timeout + + describe('Node.js Runtime', () => { + it('should invoke Node.js Lambda successfully', () => { + expect(results.node.statusCode).toBe(200); + }); + + it('should send at least one trace to Datadog', () => { + expect(results.node.traces?.length).toBeGreaterThan(0); + }); + + it('should have spans in the trace', () => { + const trace = results.node.traces![0]; + expect(trace.spans.length).toBeGreaterThan(0); + }); + }); + + describe('Python Runtime', () => { + it('should invoke Python Lambda successfully', () => { + expect(results.python.statusCode).toBe(200); + }); + + it('should send at least one trace to Datadog', () => { + expect(results.python.traces?.length).toBeGreaterThan(0); + }); + + it('should have spans in the trace', () => { + const trace = results.python.traces![0]; + expect(trace.spans.length).toBeGreaterThan(0); + }); + }); + + describe('Java Runtime', () => { + it('should invoke Java Lambda successfully', () => { + expect(results.java.statusCode).toBe(200); + }); + + it('should send at least one trace to Datadog', () => { + expect(results.java.traces?.length).toBeGreaterThan(0); + }); + + it('should have spans in the trace', () => { + const trace = results.java.traces![0]; + expect(trace.spans.length).toBeGreaterThan(0); + }); + }); + + describe('.NET Runtime', () => { + it('should invoke .NET Lambda successfully', () => { + expect(results.dotnet.statusCode).toBe(200); + }); + + it('should send at least one trace to 
Datadog', () => { + expect(results.dotnet.traces?.length).toBeGreaterThan(0); + }); + + it('should have spans in the trace', () => { + const trace = results.dotnet.traces![0]; + expect(trace.spans.length).toBeGreaterThan(0); + }); + }); +}); diff --git a/integration-tests/tests/utils/datadog.ts b/integration-tests/tests/utils/datadog.ts index 224431188..9efd7ded0 100644 --- a/integration-tests/tests/utils/datadog.ts +++ b/integration-tests/tests/utils/datadog.ts @@ -117,7 +117,6 @@ export async function getTraces( const traceMap = new Map(); for (const spanData of allSpans) { - console.log('Span data:', JSON.stringify(spanData, null, 2)); const attrs = spanData.attributes || {}; const span: DatadogSpan = {