+
+## Testing your code
+
+You can test your event handlers by passing a mocked or actual AppSync Events Lambda event.
+
+### Testing publish events
+
+=== "Test Publish events"
+
+ ```csharp
+ [Fact]
+ public void Should_Return_Unchanged_Payload()
+ {
+ // Arrange
+ var lambdaContext = new TestLambdaContext();
+ var app = new AppSyncEventsResolver();
+
+ app.OnPublish("/default/channel", payload =>
+ {
+ // Handle channel events
+ return payload;
+ });
+
+ // Act
+ var result = app.Resolve(_appSyncEvent, lambdaContext);
+
+ // Assert
+ Assert.Equal("123", result.Events[0].Id);
+ Assert.Equal("test data", result.Events[0].Payload?["data"].ToString());
+ }
+ ```
+
+=== "Publish event json"
+
+ ```json
+ {
+ "identity":"None",
+ "result":"None",
+ "request":{
+ "headers": {
+ "x-forwarded-for": "1.1.1.1, 2.2.2.2",
+ "cloudfront-viewer-country": "US",
+ "cloudfront-is-tablet-viewer": "false",
+ "via": "2.0 xxxxxxxxxxxxxxxx.cloudfront.net (CloudFront)",
+ "cloudfront-forwarded-proto": "https",
+ "origin": "https://us-west-1.console.aws.amazon.com",
+ "content-length": "217",
+ "accept-language": "en-US,en;q=0.9",
+ "host": "xxxxxxxxxxxxxxxx.appsync-api.us-west-1.amazonaws.com",
+ "x-forwarded-proto": "https",
+ "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36",
+ "accept": "*/*",
+ "cloudfront-is-mobile-viewer": "false",
+ "cloudfront-is-smarttv-viewer": "false",
+ "accept-encoding": "gzip, deflate, br",
+ "referer": "https://us-west-1.console.aws.amazon.com/appsync/home?region=us-west-1",
+ "content-type": "application/json",
+ "sec-fetch-mode": "cors",
+ "x-amz-cf-id": "3aykhqlUwQeANU-HGY7E_guV5EkNeMMtwyOgiA==",
+ "x-amzn-trace-id": "Root=1-5f512f51-fac632066c5e848ae714",
+ "authorization": "eyJraWQiOiJScWFCSlJqYVJlM0hrSnBTUFpIcVRXazNOW...",
+ "sec-fetch-dest": "empty",
+ "x-amz-user-agent": "AWS-Console-AppSync/",
+ "cloudfront-is-desktop-viewer": "true",
+ "sec-fetch-site": "cross-site",
+ "x-forwarded-port": "443"
+ },
+ "domainName":"None"
+ },
+ "info":{
+ "channel":{
+ "path":"/default/channel",
+ "segments":[
+ "default",
+ "channel"
+ ]
+ },
+ "channelNamespace":{
+ "name":"default"
+ },
+ "operation":"PUBLISH"
+ },
+ "error":"None",
+ "prev":"None",
+ "stash":{
+
+ },
+ "outErrors":[
+
+ ],
+ "events":[
+ {
+ "payload":{
+ "data": "test data"
+ },
+ "id":"123"
+ }
+ ]
+ }
+ ```
+
+### Testing subscribe events
+
+=== "Test Subscribe with code payload mock"
+
+ ```csharp
+ [Fact]
+ public async Task Should_Authorize_Subscription()
+ {
+ // Arrange
+ var lambdaContext = new TestLambdaContext();
+ var app = new AppSyncEventsResolver();
+
+ app.OnSubscribeAsync("/default/*", async (info) => true);
+
+ var subscribeEvent = new AppSyncEventsRequest
+ {
+ Info = new Information
+ {
+ Channel = new Channel
+ {
+ Path = "/default/channel",
+ Segments = ["default", "channel"]
+ },
+ Operation = AppSyncEventsOperation.Subscribe,
+ ChannelNamespace = new ChannelNamespace { Name = "default" }
+ }
+ };
+ // Act
+ var result = await app.ResolveAsync(subscribeEvent, lambdaContext);
+
+ // Assert
+ Assert.Null(result);
+ }
+ ```
\ No newline at end of file
diff --git a/docs/core/event_handler/bedrock_agent_function.md b/docs/core/event_handler/bedrock_agent_function.md
new file mode 100644
index 000000000..e68843686
--- /dev/null
+++ b/docs/core/event_handler/bedrock_agent_function.md
@@ -0,0 +1,589 @@
+---
+title: Bedrock Agent Function Resolver
+description: Event Handler - Bedrock Agent Function Resolver
+---
+
+# AWS Lambda Powertools for .NET - Bedrock Agent Function Resolver
+
+## Overview
+
+The Bedrock Agent Function Resolver is a utility for AWS Lambda that simplifies building serverless applications working with Amazon Bedrock Agents. This library eliminates boilerplate code typically required when implementing Lambda functions that serve as action groups for Bedrock Agents.
+
+Amazon Bedrock Agents can invoke functions to perform tasks based on user input. This library provides an elegant way to register, manage, and execute these functions with minimal code, handling all the parameter extraction and response formatting automatically.
+
+Create [Amazon Bedrock Agents](https://docs.aws.amazon.com/bedrock/latest/userguide/agents.html#agents-how) and focus on building your agent's logic without worrying about parsing and routing requests.
+
+```mermaid
+flowchart LR
+ Bedrock[LLM] <-- uses --> Agent
+ You[User input] --> Agent
+ Agent[Bedrock Agent] <-- tool use --> Lambda
+ subgraph Agent[Bedrock Agent]
+ ToolDescriptions[Tool Definitions]
+ end
+ subgraph Lambda[Lambda Function]
+ direction TB
+ Parsing[Parameter Parsing] --> Routing
+ Routing --> Code[Your code]
+ Code --> ResponseBuilding[Response Building]
+ end
+ style You stroke:#0F0,stroke-width:2px
+```
+
+## Features
+
+* Easily expose tools for your Large Language Model (LLM) agents
+* Automatic routing based on tool name and function details
+* Graceful error handling and response formatting
+* Fully compatible with .NET 8 AOT compilation through source generation
+
+## Terminology
+
+**Event handler** is a Powertools for AWS feature that processes an event, runs data parsing and validation, routes the request to a specific function, and returns a response to the caller in the proper format.
+
+**Function details** consist of a list of parameters, defined by their name, data type, and whether they are required. The agent uses these configurations to determine what information it needs to elicit from the user.
+
+**Action group** is a collection of two resources where you define the actions that the agent should carry out: an OpenAPI schema to define the APIs that the agent can invoke to carry out its tasks, and a Lambda function to execute those actions.
+
+**Large Language Models (LLM)** are very large deep learning models that are pre-trained on vast amounts of data, capable of extracting meanings from a sequence of text and understanding the relationships between words and phrases in it.
+
+**Amazon Bedrock Agent** is an Amazon Bedrock feature to build and deploy conversational agents that can interact with your customers using Large Language Models (LLM) and AWS Lambda functions.
+
+
+## Installation
+
+Install the package via NuGet:
+
+```bash
+dotnet add package AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction
+```
+
+### Required resources
+
+You must create an Amazon Bedrock Agent with at least one action group. Each action group can contain up to 5 tools, which in turn need to match the ones defined in your Lambda function. Bedrock must have permission to invoke your Lambda function.
+
+??? note "Click to see example SAM template"
+ ```yaml
+ AWSTemplateFormatVersion: '2010-09-09'
+ Transform: AWS::Serverless-2016-10-31
+
+ Globals:
+ Function:
+ Timeout: 30
+ MemorySize: 256
+ Runtime: dotnet8
+
+ Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: FunctionHandler
+ CodeUri: hello_world
+
+ AirlineAgentRole:
+ Type: AWS::IAM::Role
+ Properties:
+ RoleName: !Sub '${AWS::StackName}-AirlineAgentRole'
+ Description: 'Role for Bedrock Airline agent'
+ AssumeRolePolicyDocument:
+ Version: '2012-10-17'
+ Statement:
+ - Effect: Allow
+ Principal:
+ Service: bedrock.amazonaws.com
+ Action: sts:AssumeRole
+ Policies:
+ - PolicyName: bedrock
+ PolicyDocument:
+ Version: '2012-10-17'
+ Statement:
+ - Effect: Allow
+ Action: 'bedrock:*'
+ Resource:
+ - !Sub 'arn:aws:bedrock:us-*::foundation-model/*'
+ - !Sub 'arn:aws:bedrock:us-*:*:inference-profile/*'
+
+ BedrockAgentInvokePermission:
+ Type: AWS::Lambda::Permission
+ Properties:
+ FunctionName: !Ref HelloWorldFunction
+ Action: lambda:InvokeFunction
+ Principal: bedrock.amazonaws.com
+ SourceAccount: !Ref 'AWS::AccountId'
+ SourceArn: !Sub 'arn:aws:bedrock:${AWS::Region}:${AWS::AccountId}:agent/${AirlineAgent}'
+
+ # Bedrock Agent
+ AirlineAgent:
+ Type: AWS::Bedrock::Agent
+ Properties:
+ AgentName: AirlineAgent
+ Description: 'A simple Airline agent'
+ FoundationModel: !Sub 'arn:aws:bedrock:us-west-2:${AWS::AccountId}:inference-profile/us.amazon.nova-pro-v1:0'
+ Instruction: |
+ You are an airport traffic control agent. You will be given a city name and you will return the airport code for that city.
+ AgentResourceRoleArn: !GetAtt AirlineAgentRole.Arn
+ AutoPrepare: true
+ ActionGroups:
+ - ActionGroupName: AirlineActionGroup
+ ActionGroupExecutor:
                Lambda: !GetAtt HelloWorldFunction.Arn
+ FunctionSchema:
+ Functions:
+ - Name: getAirportCodeForCity
+ Description: 'Get the airport code for a given city'
+ Parameters:
+ city:
+ Type: string
+ Description: 'The name of the city to get the airport code for'
+ Required: true
+ ```
+
+## Basic Usage
+
+To create an agent, use the `BedrockAgentFunctionResolver` to register your tools and handle the requests. The resolver will automatically parse the request, route it to the appropriate function, and return a well-formed response that includes the tool's output and any existing session attributes.
+
+=== "Executable assembly"
+
+ ```csharp
+ using Amazon.Lambda.Core;
+ using Amazon.Lambda.RuntimeSupport;
+ using AWS.Lambda.Powertools.EventHandler.Resolvers;
+ using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.Models;
+
+ var resolver = new BedrockAgentFunctionResolver();
+
+ resolver
+ .Tool("GetWeather", (string city) => $"The weather in {city} is sunny")
+ .Tool("CalculateSum", (int a, int b) => $"The sum of {a} and {b} is {a + b}")
+ .Tool("GetCurrentTime", () => $"The current time is {DateTime.Now}");
+
+ // The function handler that will be called for each Lambda event
+ var handler = async (BedrockFunctionRequest input, ILambdaContext context) =>
+ {
+ return await resolver.ResolveAsync(input, context);
+ };
+
+ // Build the Lambda runtime client passing in the handler to call for each
+ // event and the JSON serializer to use for translating Lambda JSON documents
+ // to .NET types.
+ await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer())
+ .Build()
+ .RunAsync();
+ ```
+
+=== "Class Library"
+
+ ```csharp
+ using AWS.Lambda.Powertools.EventHandler.Resolvers;
+ using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.Models;
+ using Amazon.Lambda.Core;
+
+ [assembly: LambdaSerializer(typeof(Amazon.Lambda.Serialization.SystemTextJson.DefaultLambdaJsonSerializer))]
+
+ namespace MyLambdaFunction
+ {
+ public class Function
+ {
+ private readonly BedrockAgentFunctionResolver _resolver;
+
+ public Function()
+ {
+ _resolver = new BedrockAgentFunctionResolver();
+
+ // Register simple tool functions
+ _resolver
+ .Tool("GetWeather", (string city) => $"The weather in {city} is sunny")
+ .Tool("CalculateSum", (int a, int b) => $"The sum of {a} and {b} is {a + b}")
+ .Tool("GetCurrentTime", () => $"The current time is {DateTime.Now}");
+ }
+
+ // Lambda handler function
+ public BedrockFunctionResponse FunctionHandler(
+ BedrockFunctionRequest input, ILambdaContext context)
+ {
+ return _resolver.Resolve(input, context);
+ }
+ }
+ }
+ ```
+When the Bedrock Agent invokes your Lambda function with a request to use the "GetWeather" tool and a parameter for "city", the resolver automatically extracts the parameter, passes it to your function, and formats the response.
+
+## Response Format
+
+You can return any type from your tool function, the library will automatically format the response in a way that Bedrock Agents expect.
+
+The response will include:
+
+- The action group name
+- The function name
+- The function response body, which can be a text response or other structured data in string format
+- Any session attributes that were passed in the request or modified during the function execution
+
+The response body will **always be a string**.
+
+If you want to return an object, the best practice is to override the `ToString()` method of your return type to provide a custom string representation. Alternatively, wrap your object in an anonymous object (`return new { myObject }`), or simply return a string directly.
+
+```csharp
+public class AirportInfo
+{
+ public string City { get; set; } = string.Empty;
+ public string Code { get; set; } = string.Empty;
+ public string Name { get; set; } = string.Empty;
+
+ public override string ToString()
+ {
+ return $"{Name} ({Code}) in {City}";
+ }
+}
+
+resolver.Tool("getAirportCodeForCity", "Get airport code and full name for a specific city", (string city, ILambdaContext context) =>
+{
+ var airportService = new AirportService();
+ var airportInfo = airportService.GetAirportInfoForCity(city);
+ // Note: Best approach is to override the ToString method in the AirportInfo class
+ return airportInfo;
+});
+
+// Alternatively, you can return an anonymous object if you don't override ToString()
+// return new {
+// airportInfo
+// };
+```
+
+## How It Works with Amazon Bedrock Agents
+
+1. When a user interacts with a Bedrock Agent, the agent identifies when it needs to call an action to fulfill the user's request.
+2. The agent determines which function to call and what parameters are needed.
+3. Bedrock sends a request to your Lambda function with the function name and parameters.
+4. The BedrockAgentFunctionResolver automatically:
+ - Finds the registered handler for the requested function
+ - Extracts and converts parameters to the correct types
+ - Invokes your handler with the parameters
+ - Formats the response in the way Bedrock Agents expect
+5. The agent receives the response and uses it to continue the conversation with the user
+
+## Advanced Usage
+
+### Custom type serialization
+
+You can have your own custom types as arguments to the tool function. The library will automatically handle serialization and deserialization of these types. In this case, you need to ensure that your custom type is serializable to JSON; if serialization fails, the object will be null.
+
+```csharp hl_lines="4"
+resolver.Tool(
+ name: "PriceCalculator",
+ description: "Calculate total price with tax",
+ handler: (MyCustomType myCustomType) =>
+ {
+ var withTax = myCustomType.Price * 1.2m;
+ return $"Total price with tax: {withTax.ToString("F2", CultureInfo.InvariantCulture)}";
+ }
+);
+```
+
+### Custom type serialization native AOT
+
+For native AOT compilation, you can use JsonSerializerContext and pass it to `BedrockAgentFunctionResolver`. This allows the library to generate the necessary serialization code at compile time, ensuring compatibility with AOT.
+
+```csharp hl_lines="1 5 12-15"
+var resolver = new BedrockAgentFunctionResolver(MycustomSerializationContext.Default);
+resolver.Tool(
+ name: "PriceCalculator",
+ description: "Calculate total price with tax",
+ handler: (MyCustomType myCustomType) =>
+ {
+ var withTax = myCustomType.Price * 1.2m;
+ return $"Total price with tax: {withTax.ToString("F2", CultureInfo.InvariantCulture)}";
+ }
+);
+
+[JsonSerializable(typeof(MyCustomType))]
+public partial class MycustomSerializationContext : JsonSerializerContext
+{
+}
+```
+
+### Accessing Lambda Context
+
+You can access the original Lambda event or context for additional information. These are passed to the handler function as optional arguments.
+
+```csharp
+resolver.Tool(
+ "LogRequest",
+ "Logs request information and returns confirmation",
+ (string requestId, ILambdaContext context) =>
+ {
+ context.Logger.LogLine($"Processing request {requestId}");
+ return $"Request {requestId} logged successfully";
+ });
+```
+
+### Handling errors
+
+By default, we will handle errors gracefully and return a well-formed response to the agent so that it can continue the conversation with the user.
+
+When an error occurs, we send back an error message in the response body that includes the error type and message. The agent will then use this information to let the user know that something went wrong.
+
+If you want to handle errors differently, you can return a `BedrockFunctionResponse` with a custom `Body` and `ResponseState` set to `FAILURE`. This is useful when you want to abort the conversation.
+
+```csharp
+resolver.Tool("CustomFailure", () =>
+{
+ // Return a custom FAILURE response
+ return new BedrockFunctionResponse
+ {
+ Response = new Response
+ {
+ ActionGroup = "TestGroup",
+ Function = "CustomFailure",
+ FunctionResponse = new FunctionResponse
+ {
+ ResponseBody = new ResponseBody
+ {
+ Text = new TextBody
+ {
+ Body = "Critical error occurred: Database unavailable"
+ }
+ },
+ ResponseState = ResponseState.FAILURE // Mark as FAILURE to abort the conversation
+ }
+ }
+ };
+});
+```
+
+### Setting session attributes
+
+When Bedrock Agents invoke your Lambda function, it can pass session attributes that you can use to store information across multiple interactions with the user. You can access these attributes in your handler function and modify them as needed.
+
+```csharp
+// Create a counter tool that reads and updates session attributes
+resolver.Tool("CounterTool", (BedrockFunctionRequest request) =>
+{
+ // Read the current count from session attributes
+ int currentCount = 0;
+ if (request.SessionAttributes != null &&
+ request.SessionAttributes.TryGetValue("counter", out var countStr) &&
+ int.TryParse(countStr, out var count))
+ {
+ currentCount = count;
+ }
+
+ // Increment the counter
+ currentCount++;
+
+ // Create a new dictionary with updated counter
+    var updatedSessionAttributes = new Dictionary<string, string>(request.SessionAttributes ?? new Dictionary<string, string>())
+ {
+ ["counter"] = currentCount.ToString(),
+ ["lastAccessed"] = DateTime.UtcNow.ToString("o")
+ };
+
+ // Return response with updated session attributes
+ return new BedrockFunctionResponse
+ {
+ Response = new Response
+ {
+ ActionGroup = request.ActionGroup,
+ Function = request.Function,
+ FunctionResponse = new FunctionResponse
+ {
+ ResponseBody = new ResponseBody
+ {
+ Text = new TextBody { Body = $"Current count: {currentCount}" }
+ }
+ }
+ },
+ SessionAttributes = updatedSessionAttributes,
+ PromptSessionAttributes = request.PromptSessionAttributes
+ };
+});
+```
+
+### Asynchronous Functions
+
+Register and use asynchronous functions:
+
+```csharp
+_resolver.Tool(
+ "FetchUserData",
+ "Fetches user data from external API",
+ async (string userId, ILambdaContext ctx) =>
+ {
+ // Log the request
+ ctx.Logger.LogLine($"Fetching data for user {userId}");
+
+ // Simulate API call
+ await Task.Delay(100);
+
+ // Return user information
+ return new { Id = userId, Name = "John Doe", Status = "Active" }.ToString();
+ });
+```
+
+### Direct Access to Request Payload
+
+Access the raw Bedrock Agent request:
+
+```csharp
+_resolver.Tool(
+ "ProcessRawRequest",
+ "Processes the raw Bedrock Agent request",
+ (BedrockFunctionRequest input) =>
+ {
+ var functionName = input.Function;
+ var parameterCount = input.Parameters.Count;
+ return $"Received request for {functionName} with {parameterCount} parameters";
+ });
+```
+
+## Dependency Injection
+
+The library supports dependency injection for integrating with services:
+
+```csharp
+using Microsoft.Extensions.DependencyInjection;
+
+// Set up dependency injection
+var services = new ServiceCollection();
+services.AddSingleton<IWeatherService, WeatherService>();
+services.AddBedrockResolver(); // Extension method to register the resolver
+
+var serviceProvider = services.BuildServiceProvider();
+var resolver = serviceProvider.GetRequiredService<BedrockAgentFunctionResolver>();
+
+// Register a tool that uses an injected service
+resolver.Tool(
+ "GetWeatherForecast",
+ "Gets the weather forecast for a location",
+ (string city, IWeatherService weatherService, ILambdaContext ctx) =>
+ {
+ ctx.Logger.LogLine($"Getting weather for {city}");
+ return weatherService.GetForecast(city);
+ });
+```
+
+## Using Attributes to Define Tools
+
+You can define Bedrock Agent functions using attributes instead of explicit registration. This approach provides a clean, declarative way to organize your tools into classes:
+
+### Define Tool Classes with Attributes
+
+```csharp
+// Define your tool class with BedrockFunctionType attribute
+[BedrockFunctionType]
+public class WeatherTools
+{
+ // Each method marked with BedrockFunctionTool attribute becomes a tool
+ [BedrockFunctionTool(Name = "GetWeather", Description = "Gets weather forecast for a location")]
+ public static string GetWeather(string city, int days)
+ {
+ return $"Weather forecast for {city} for the next {days} days: Sunny";
+ }
+
+ // Supports dependency injection and Lambda context access
+ [BedrockFunctionTool(Name = "GetDetailedForecast", Description = "Gets detailed weather forecast")]
+ public static string GetDetailedForecast(
+ string location,
+ IWeatherService weatherService,
+ ILambdaContext context)
+ {
+ context.Logger.LogLine($"Getting forecast for {location}");
+ return weatherService.GetForecast(location);
+ }
+}
+```
+
+### Register Tool Classes in Your Application
+
+Using the extension method provided in the library, you can easily register all tools from a class:
+
+```csharp
+
+var services = new ServiceCollection();
+services.AddSingleton<IWeatherService, WeatherService>();
+services.AddBedrockResolver(); // Extension method to register the resolver
+
+var serviceProvider = services.BuildServiceProvider();
+var resolver = serviceProvider.GetRequiredService<BedrockAgentFunctionResolver>()
+    .RegisterTool<WeatherTools>(); // Register tools from the class during service registration
+
+```
+
+## Complete Example with Dependency Injection
+
+You can find examples in the [Powertools for AWS Lambda (.NET) GitHub repository](https://github.com/aws-powertools/powertools-lambda-dotnet/tree/develop/examples/Event%20Handler/BedrockAgentFunction).
+
+
+```csharp
+using Amazon.BedrockAgentRuntime.Model;
+using Amazon.Lambda.Core;
+using AWS.Lambda.Powertools.EventHandler;
+using Microsoft.Extensions.DependencyInjection;
+
+[assembly: LambdaSerializer(typeof(Amazon.Lambda.Serialization.SystemTextJson.DefaultLambdaJsonSerializer))]
+
+namespace MyBedrockAgent
+{
+ // Service interfaces and implementations
+ public interface IWeatherService
+ {
+ string GetForecast(string city);
+ }
+
+ public class WeatherService : IWeatherService
+ {
+ public string GetForecast(string city) => $"Weather forecast for {city}: Sunny, 75°F";
+ }
+
+ public interface IProductService
+ {
+ string CheckInventory(string productId);
+ }
+
+ public class ProductService : IProductService
+ {
+ public string CheckInventory(string productId) => $"Product {productId} has 25 units in stock";
+ }
+
+ // Main Lambda function
+ public class Function
+ {
+ private readonly BedrockAgentFunctionResolver _resolver;
+
+ public Function()
+ {
+ // Set up dependency injection
+ var services = new ServiceCollection();
+            services.AddSingleton<IWeatherService, WeatherService>();
+            services.AddSingleton<IProductService, ProductService>();
+ services.AddBedrockResolver(); // Extension method to register the resolver
+
+ var serviceProvider = services.BuildServiceProvider();
+            _resolver = serviceProvider.GetRequiredService<BedrockAgentFunctionResolver>();
+
+ // Register tool functions that use injected services
+ _resolver
+ .Tool("GetWeatherForecast",
+ "Gets weather forecast for a city",
+ (string city, IWeatherService weatherService, ILambdaContext ctx) =>
+ {
+ ctx.Logger.LogLine($"Weather request for {city}");
+ return weatherService.GetForecast(city);
+ })
+ .Tool("CheckInventory",
+ "Checks inventory for a product",
+ (string productId, IProductService productService) =>
+ productService.CheckInventory(productId))
+ .Tool("GetServerTime",
+ "Returns the current server time",
+ () => DateTime.Now.ToString("F"));
+ }
+
+        public BedrockFunctionResponse FunctionHandler(
+            BedrockFunctionRequest input, ILambdaContext context)
+ {
+ return _resolver.Resolve(input, context);
+ }
+ }
+}
+```
\ No newline at end of file
diff --git a/docs/core/logging-v1.md b/docs/core/logging-v1.md
new file mode 100644
index 000000000..ba06f8e39
--- /dev/null
+++ b/docs/core/logging-v1.md
@@ -0,0 +1,808 @@
+---
+title: Logging v1 - Legacy
+description: Core utility
+---
+
+!!! warning
+ Version 1.x.x will continue to be supported until **end of July 2025** for critical bug fixes and security updates in very exceptional cases where you cannot update to v2, but no new features will be added to this version.
+
+ We recommend you upgrade to the latest version.
+
+ The latest version is available at [Logging v2](https://docs.powertools.aws.dev/lambda/dotnet/core/logging-v2/).
+
+
+The logging utility provides a Lambda optimized logger with output structured as JSON.
+
+
+## Key features
+
+* Capture key fields from Lambda context, cold start and structures logging output as JSON
+* Log Lambda event when instructed (disabled by default)
+* Log sampling enables DEBUG log level for a percentage of requests (disabled by default)
+* Append additional keys to structured log at any point in time
+* Ahead-of-Time compilation to native code support [AOT](https://docs.aws.amazon.com/lambda/latest/dg/dotnet-native-aot.html) from version 1.6.0
+
+## Installation
+
+Powertools for AWS Lambda (.NET) are available as NuGet packages. You can install the packages from [NuGet Gallery](https://www.nuget.org/packages?q=AWS+Lambda+Powertools*){target="_blank"} or from Visual Studio editor by searching `AWS.Lambda.Powertools*` to see various utilities available.
+
+* [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages?q=AWS.Lambda.Powertools.Logging):
+
+ `dotnet add package AWS.Lambda.Powertools.Logging --version 1.6.5`
+
+## Getting started
+
+!!! info
+
+ AOT Support
+ If looking for AOT specific configurations navigate to the [AOT section](#aot-support)
+
+
+Logging requires two settings:
+
+Setting | Description | Environment variable | Attribute parameter
+------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | -------------------------------------------------
+**Service** | Sets **Service** key that will be present across all log statements | `POWERTOOLS_SERVICE_NAME` | `Service`
+**Logging level** | Sets how verbose Logger should be (Information, by default) | `POWERTOOLS_LOG_LEVEL` | `LogLevel`
+
+### Service Property Priority Resolution
+
+The root level Service property now correctly follows this priority order:
+
+1. LoggingAttribute.Service (property value set in the decorator)
+2. POWERTOOLS_SERVICE_NAME (environment variable)
+
+
+### Example using AWS Serverless Application Model (AWS SAM)
+
+You can override log level by setting **`POWERTOOLS_LOG_LEVEL`** environment variable in the AWS SAM template.
+
+You can also explicitly set a service name via **`POWERTOOLS_SERVICE_NAME`** environment variable. This sets **Service** key that will be present across all log statements.
+
+Here is an example using the AWS SAM [Globals section](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-template-anatomy-globals.html).
+
+=== "template.yaml"
+
+ ```yaml hl_lines="13 14"
+ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ # SPDX-License-Identifier: MIT-0
+ AWSTemplateFormatVersion: "2010-09-09"
+ Transform: AWS::Serverless-2016-10-31
+ Description: >
+ Example project for Powertools for AWS Lambda (.NET) Logging utility
+
+ Globals:
+ Function:
+ Timeout: 10
+ Environment:
+ Variables:
+ POWERTOOLS_SERVICE_NAME: powertools-dotnet-logging-sample
+ POWERTOOLS_LOG_LEVEL: Debug
+ POWERTOOLS_LOGGER_LOG_EVENT: true
+ POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase
+ POWERTOOLS_LOGGER_SAMPLE_RATE: 0
+ ```
+
+### Full list of environment variables
+
+| Environment variable | Description | Default |
+| ------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- |
+| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | `"service_undefined"` |
+| **POWERTOOLS_LOG_LEVEL** | Sets logging level | `Information` |
+| **POWERTOOLS_LOGGER_CASE** | Override the default casing for log keys | `SnakeCase` |
+| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | `false` |
+| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | `0` |
+
+
+### Using AWS Lambda Advanced Logging Controls (ALC)
+
+!!! question "When is it useful?"
+ When you want to set a logging policy to drop informational or verbose logs for one or all AWS Lambda functions, regardless of runtime and logger used.
+
+With [AWS Lambda Advanced Logging Controls (ALC)](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs.html#monitoring-cloudwatchlogs-advanced){target="_blank"}, you can enforce a minimum log level that Lambda will accept from your application code.
+
+When enabled, you should keep `Logger` and ALC log level in sync to avoid data loss.
+
+!!! warning "When using AWS Lambda Advanced Logging Controls (ALC)"
+ - When Powertools Logger output is set to `PascalCase` **`Level`** property name will be replaced by **`LogLevel`** as a property name.
+ - ALC takes precedence over **`POWERTOOLS_LOG_LEVEL`** and when setting it in code using **`[Logging(LogLevel = )]`**
+
+Here's a sequence diagram to demonstrate how ALC will drop both `Information` and `Debug` logs emitted from `Logger`, when ALC log level is stricter than `Logger`.
+
+```mermaid
+sequenceDiagram
+ title Lambda ALC allows WARN logs only
+ participant Lambda service
+ participant Lambda function
+ participant Application Logger
+
+ Note over Lambda service: AWS_LAMBDA_LOG_LEVEL="WARN"
+ Note over Application Logger: POWERTOOLS_LOG_LEVEL="DEBUG"
+ Lambda service->>Lambda function: Invoke (event)
+ Lambda function->>Lambda function: Calls handler
+ Lambda function->>Application Logger: Logger.Warning("Something happened")
+ Lambda function-->>Application Logger: Logger.Debug("Something happened")
+ Lambda function-->>Application Logger: Logger.Information("Something happened")
+
+ Lambda service->>Lambda service: DROP INFO and DEBUG logs
+
+ Lambda service->>CloudWatch Logs: Ingest error logs
+```
+
+**Priority of log level settings in Powertools for AWS Lambda**
+
+We prioritise log level settings in this order:
+
+1. AWS_LAMBDA_LOG_LEVEL environment variable
+2. Setting the log level in code using `[Logging(LogLevel = )]`
+3. POWERTOOLS_LOG_LEVEL environment variable
+
+If you set `Logger` level lower than ALC, we will emit a warning informing you that your messages will be discarded by Lambda.
+
+> **NOTE**
+> With ALC enabled, we are unable to increase the minimum log level below the `AWS_LAMBDA_LOG_LEVEL` environment variable value, see [AWS Lambda service documentation](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs.html#monitoring-cloudwatchlogs-log-level){target="_blank"} for more details.
+
+## Standard structured keys
+
+Your logs will always include the following keys to your structured logging:
+
+Key | Type | Example | Description
+------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- | -------------------------------------------------
+**Timestamp** | string | "2020-05-24 18:17:33,774" | Timestamp of actual log statement
+**Level** | string | "Information" | Logging level
+**Name** | string | "Powertools for AWS Lambda (.NET) Logger" | Logger name
+**ColdStart** | bool | true| ColdStart value.
+**Service** | string | "payment" | Service name defined. "service_undefined" will be used if unknown
+**SamplingRate** | double | 0.1 | Debug logging sampling rate in percentage e.g. 10% in this case
+**Message** | string | "Collecting payment" | Log statement value. Unserializable JSON values will be cast to string
+**FunctionName**| string | "example-powertools-HelloWorldFunction-1P1Z6B39FLU73"
+**FunctionVersion**| string | "12"
+**FunctionMemorySize**| string | "128"
+**FunctionArn**| string | "arn:aws:lambda:eu-west-1:012345678910:function:example-powertools-HelloWorldFunction-1P1Z6B39FLU73"
+**XRayTraceId**| string | "1-5759e988-bd862e3fe1be46a994272793" | X-Ray Trace ID when Lambda function has enabled Tracing
+**FunctionRequestId**| string | "899856cb-83d1-40d7-8611-9e78f15f32f4" | AWS Request ID from lambda context
+
+## Logging incoming event
+
+When debugging in non-production environments, you can instruct Logger to log the incoming event with `LogEvent` parameter or via `POWERTOOLS_LOGGER_LOG_EVENT` environment variable.
+
+!!! warning
+ Log event is disabled by default to prevent sensitive info being logged.
+
+=== "Function.cs"
+
+ ```c# hl_lines="6"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(LogEvent = true)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+
+## Setting a Correlation ID
+
+You can set a Correlation ID using `CorrelationIdPath` parameter by passing a [JSON Pointer expression](https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-03){target="_blank"}.
+
+!!! Attention
+ The JSON Pointer expression is `case sensitive`. In the below example `/headers/my_request_id_header` would work but `/Headers/my_request_id_header` would not find the element.
+
+
+=== "Function.cs"
+
+ ```c# hl_lines="6"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(CorrelationIdPath = "/headers/my_request_id_header")]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+=== "Example Event"
+
+ ```json hl_lines="3"
+ {
+ "headers": {
+ "my_request_id_header": "correlation_id_value"
+ }
+ }
+ ```
+
+=== "Example CloudWatch Logs excerpt"
+
+ ```json hl_lines="15"
+ {
+ "cold_start": true,
+ "xray_trace_id": "1-61b7add4-66532bb81441e1b060389429",
+ "function_name": "test",
+ "function_version": "$LATEST",
+ "function_memory_size": 128,
+ "function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "level": "Information",
+ "service": "lambda-example",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "message": "Collecting payment",
+ "sampling_rate": 0.7,
+ "correlation_id": "correlation_id_value",
+ }
+ ```
+We provide [built-in JSON Pointer expression](https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-03){target="_blank"}
+for known event sources, where either a request ID or X-Ray Trace ID are present.
+
+=== "Function.cs"
+
+ ```c# hl_lines="6"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(CorrelationIdPath = CorrelationIdPaths.ApiGatewayRest)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+
+=== "Example Event"
+
+ ```json hl_lines="3"
+ {
+ "RequestContext": {
+ "RequestId": "correlation_id_value"
+ }
+ }
+ ```
+
+=== "Example CloudWatch Logs excerpt"
+
+ ```json hl_lines="15"
+ {
+ "cold_start": true,
+ "xray_trace_id": "1-61b7add4-66532bb81441e1b060389429",
+ "function_name": "test",
+ "function_version": "$LATEST",
+ "function_memory_size": 128,
+ "function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "level": "Information",
+ "service": "lambda-example",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "message": "Collecting payment",
+ "sampling_rate": 0.7,
+ "correlation_id": "correlation_id_value",
+ }
+ ```
+
+## Appending additional keys
+
+!!! info "Custom keys are persisted across warm invocations"
+ Always set additional keys as part of your handler to ensure they have the latest value, or explicitly clear them with [`ClearState=true`](#clearing-all-state).
+
+You can append your own keys to your existing logs via `AppendKey`. Typically this value would be passed into the function via the event. Appended keys are added to all subsequent log entries in the current execution from the point the logger method is called. To ensure the key is added to all log entries, call this method as early as possible in the Lambda handler.
+
+=== "Function.cs"
+
+ ```c# hl_lines="21"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(LogEvent = true)]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigwProxyEvent,
+ ILambdaContext context)
+ {
+ var requestContextRequestId = apigwProxyEvent.RequestContext.RequestId;
+
+ var lookupInfo = new Dictionary<string, object>()
+ {
+ {"LookupInfo", new Dictionary<string, object>{{ "LookupId", requestContextRequestId }}}
+ };
+
+ // Appended keys are added to all subsequent log entries in the current execution.
+ // Call this method as early as possible in the Lambda handler.
+ // Typically this value would be passed into the function via the event.
+ // Set ClearState = true to force the removal of keys across invocations.
+ Logger.AppendKeys(lookupInfo);
+
+ Logger.LogInformation("Getting ip address from external service");
+
+ }
+ ```
+=== "Example CloudWatch Logs excerpt"
+
+ ```json hl_lines="4 5 6"
+ {
+ "cold_start": false,
+ "xray_trace_id": "1-622eede0-647960c56a91f3b071a9fff1",
+ "lookup_info": {
+ "lookup_id": "4c50eace-8b1e-43d3-92ba-0efacf5d1625"
+ },
+ "function_name": "PowertoolsLoggingSample-HelloWorldFunction-hm1r10VT3lCy",
+ "function_version": "$LATEST",
+ "function_memory_size": 256,
+ "function_arn": "arn:aws:lambda:ap-southeast-2:538510314095:function:PowertoolsLoggingSample-HelloWorldFunction-hm1r10VT3lCy",
+ "function_request_id": "96570b2c-f00e-471c-94ad-b25e95ba7347",
+ "timestamp": "2022-03-14T07:25:20.9418065Z",
+ "level": "Information",
+ "service": "powertools-dotnet-logging-sample",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "message": "Getting ip address from external service"
+ }
+ ```
+
+### Removing additional keys
+
+You can remove any additional key from entry using `Logger.RemoveKeys()`.
+
+=== "Function.cs"
+
+ ```c# hl_lines="21 22"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(LogEvent = true)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ Logger.AppendKey("test", "willBeLogged");
+ ...
+ var customKeys = new Dictionary<string, string>
+ {
+ {"test1", "value1"},
+ {"test2", "value2"}
+ };
+
+ Logger.AppendKeys(customKeys);
+ ...
+ Logger.RemoveKeys("test");
+ Logger.RemoveKeys("test1", "test2");
+ ...
+ }
+ }
+ ```
+
+## Extra Keys
+
+Extra keys allow you to append additional keys to a log entry. Unlike `AppendKey`, extra keys will only apply to the current log entry.
+
+Extra keys argument is available for all log levels' methods, as implemented in the standard logging library - e.g. Logger.Information, Logger.Warning.
+
+It accepts any dictionary, and all of its entries will be added as part of the root structure of the logs for that log statement.
+
+!!! info
+ Any key added using extra keys will not be persisted for subsequent messages.
+
+=== "Function.cs"
+
+ ```c# hl_lines="16"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(LogEvent = true)]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigwProxyEvent,
+ ILambdaContext context)
+ {
+ var requestContextRequestId = apigwProxyEvent.RequestContext.RequestId;
+
+ var lookupId = new Dictionary<string, object>()
+ {
+ { "LookupId", requestContextRequestId }
+ };
+
+ // Appended keys are added to all subsequent log entries in the current execution.
+ // Call this method as early as possible in the Lambda handler.
+ // Typically this value would be passed into the function via the event.
+ // Set ClearState = true to force the removal of keys across invocations.
+ Logger.AppendKeys(lookupId);
+ }
+ ```
+
+### Clearing all state
+
+Logger is commonly initialized in the global scope. Due to [Lambda Execution Context reuse](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), this means that custom keys can be persisted across invocations. If you want all custom keys to be deleted, you can use `ClearState=true` attribute on `[Logging]` attribute.
+
+=== "Function.cs"
+
+ ```cs hl_lines="6 13"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(ClearState = true)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ if (apigProxyEvent.Headers.ContainsKey("SomeSpecialHeader"))
+ {
+ Logger.AppendKey("SpecialKey", "value");
+ }
+
+ Logger.LogInformation("Collecting payment");
+ ...
+ }
+ }
+ ```
+=== "#1 Request"
+
+ ```json hl_lines="11"
+ {
+ "level": "Information",
+ "message": "Collecting payment",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "service": "payment",
+ "cold_start": true,
+ "function_name": "test",
+ "function_memory_size": 128,
+ "function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
+ "special_key": "value"
+ }
+ ```
+
+=== "#2 Request"
+
+ ```json
+ {
+ "level": "Information",
+ "message": "Collecting payment",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "service": "payment",
+ "cold_start": true,
+ "function_name": "test",
+ "function_memory_size": 128,
+ "function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72"
+ }
+ ```
+
+## Sampling debug logs
+
+You can dynamically set a percentage of your logs to **DEBUG** level via env var `POWERTOOLS_LOGGER_SAMPLE_RATE` or
+via `SamplingRate` parameter on attribute.
+
+!!! info
+ Configuration on environment variable is given precedence over sampling rate configuration on attribute, provided it's in valid value range.
+
+=== "Sampling via attribute parameter"
+
+ ```c# hl_lines="6"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(SamplingRate = 0.5)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+
+=== "Sampling via environment variable"
+
+ ```yaml hl_lines="8"
+
+ Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ ...
+ Environment:
+ Variables:
+ POWERTOOLS_LOGGER_SAMPLE_RATE: 0.5
+ ```
+
+## Configure Log Output Casing
+
+By default Powertools for AWS Lambda (.NET) outputs logging keys using **snake case** (e.g. *"function_memory_size": 128*). This allows developers using different Powertools for AWS Lambda (.NET) runtimes, to search logs across services written in languages such as Python or TypeScript.
+
+If you want to override the default behavior you can either set the desired casing through attributes, as described in the example below, or by setting the `POWERTOOLS_LOGGER_CASE` environment variable on your AWS Lambda function. Allowed values are: `CamelCase`, `PascalCase` and `SnakeCase`.
+
+=== "Output casing via attribute parameter"
+
+ ```c# hl_lines="6"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(LoggerOutputCase = LoggerOutputCase.CamelCase)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+
+Below are some output examples for different casing.
+
+=== "Camel Case"
+
+ ```json
+ {
+ "level": "Information",
+ "message": "Collecting payment",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "service": "payment",
+ "coldStart": true,
+ "functionName": "test",
+ "functionMemorySize": 128,
+ "functionArn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "functionRequestId": "52fdfc07-2182-154f-163f-5f0f9a621d72"
+ }
+ ```
+
+=== "Pascal Case"
+
+ ```json
+ {
+ "Level": "Information",
+ "Message": "Collecting payment",
+ "Timestamp": "2021-12-13T20:32:22.5774262Z",
+ "Service": "payment",
+ "ColdStart": true,
+ "FunctionName": "test",
+ "FunctionMemorySize": 128,
+ "FunctionArn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "FunctionRequestId": "52fdfc07-2182-154f-163f-5f0f9a621d72"
+ }
+ ```
+
+=== "Snake Case"
+
+ ```json
+ {
+ "level": "Information",
+ "message": "Collecting payment",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "service": "payment",
+ "cold_start": true,
+ "function_name": "test",
+ "function_memory_size": 128,
+ "function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72"
+ }
+ ```
+
+## Custom Log formatter (Bring Your Own Formatter)
+
+You can customize the structure (keys and values) of your log entries by implementing a custom log formatter and override default log formatter using ``Logger.UseFormatter`` method. You can implement a custom log formatter by inheriting the ``ILogFormatter`` class and implementing the ``object FormatLogEntry(LogEntry logEntry)`` method.
+
+=== "Function.cs"
+
+ ```c# hl_lines="11"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ ///
+ /// Function constructor
+ ///
+ public Function()
+ {
+ Logger.UseFormatter(new CustomLogFormatter());
+ }
+
+ [Logging(CorrelationIdPath = "/headers/my_request_id_header", SamplingRate = 0.7)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+=== "CustomLogFormatter.cs"
+
+ ```c#
+ public class CustomLogFormatter : ILogFormatter
+ {
+ public object FormatLogEntry(LogEntry logEntry)
+ {
+ return new
+ {
+ Message = logEntry.Message,
+ Service = logEntry.Service,
+ CorrelationIds = new
+ {
+ AwsRequestId = logEntry.LambdaContext?.AwsRequestId,
+ XRayTraceId = logEntry.XRayTraceId,
+ CorrelationId = logEntry.CorrelationId
+ },
+ LambdaFunction = new
+ {
+ Name = logEntry.LambdaContext?.FunctionName,
+ Arn = logEntry.LambdaContext?.InvokedFunctionArn,
+ MemoryLimitInMB = logEntry.LambdaContext?.MemoryLimitInMB,
+ Version = logEntry.LambdaContext?.FunctionVersion,
+ ColdStart = logEntry.ColdStart,
+ },
+ Level = logEntry.Level.ToString(),
+ Timestamp = logEntry.Timestamp.ToString("o"),
+ Logger = new
+ {
+ Name = logEntry.Name,
+ SampleRate = logEntry.SamplingRate
+ },
+ };
+ }
+ }
+ ```
+
+=== "Example CloudWatch Logs excerpt"
+
+ ```json
+ {
+ "Message": "Test Message",
+ "Service": "lambda-example",
+ "CorrelationIds": {
+ "AwsRequestId": "52fdfc07-2182-154f-163f-5f0f9a621d72",
+ "XRayTraceId": "1-61b7add4-66532bb81441e1b060389429",
+ "CorrelationId": "correlation_id_value"
+ },
+ "LambdaFunction": {
+ "Name": "test",
+ "Arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+ "MemorySize": 128,
+ "Version": "$LATEST",
+ "ColdStart": true
+ },
+ "Level": "Information",
+ "Timestamp": "2021-12-13T20:32:22.5774262Z",
+ "Logger": {
+ "Name": "AWS.Lambda.Powertools.Logging.Logger",
+ "SampleRate": 0.7
+ }
+ }
+ ```
+
+## AOT Support
+
+!!! info
+
+ If you want to use the `LogEvent`, `Custom Log Formatter` features, or serialize your own types when Logging events, you need to make changes in your Lambda `Main` method.
+
+!!! info
+
+ Starting from version 1.6.0, it is required to update the Amazon.Lambda.Serialization.SystemTextJson NuGet package to version 2.4.3 in your csproj.
+
+### Configure
+
+Replace `SourceGeneratorLambdaJsonSerializer` with `PowertoolsSourceGeneratorSerializer`.
+
+This change enables Powertools to construct an instance of `JsonSerializerOptions` used to customize the serialization and deserialization of Lambda JSON events and your own types.
+
+=== "Before"
+
+ ```csharp
+ Func<APIGatewayHttpApiV2ProxyRequest, ILambdaContext, Task<APIGatewayHttpApiV2ProxyResponse>> handler = FunctionHandler;
+ await LambdaBootstrapBuilder.Create(handler, new SourceGeneratorLambdaJsonSerializer<MyCustomJsonSerializerContext>())
+ .Build()
+ .RunAsync();
+ ```
+
+=== "After"
+
+ ```csharp hl_lines="2"
+ Func<APIGatewayHttpApiV2ProxyRequest, ILambdaContext, Task<APIGatewayHttpApiV2ProxyResponse>> handler = FunctionHandler;
+ await LambdaBootstrapBuilder.Create(handler, new PowertoolsSourceGeneratorSerializer<MyCustomJsonSerializerContext>())
+ .Build()
+ .RunAsync();
+ ```
+
+For example when you have your own Demo type
+
+```csharp
+public class Demo
+{
+ public string Name { get; set; }
+ public Headers Headers { get; set; }
+}
+```
+
+To be able to serialize it in AOT you have to have your own `JsonSerializerContext`
+
+```csharp
+[JsonSerializable(typeof(APIGatewayHttpApiV2ProxyRequest))]
+[JsonSerializable(typeof(APIGatewayHttpApiV2ProxyResponse))]
+[JsonSerializable(typeof(Demo))]
+public partial class MyCustomJsonSerializerContext : JsonSerializerContext
+{
+}
+```
+
+When you update your code to use `PowertoolsSourceGeneratorSerializer`, we combine your `JsonSerializerContext` with Powertools' `JsonSerializerContext`. This allows Powertools to serialize your types and Lambda events.
+
+### Custom Log Formatter
+
+To use a custom log formatter with AOT, pass an instance of `ILogFormatter` to `PowertoolsSourceGeneratorSerializer` instead of using the static `Logger.UseFormatter` in the Function constructor as you do in non-AOT Lambdas.
+
+=== "Function Main method"
+
+ ```csharp hl_lines="5"
+
+ Func<APIGatewayHttpApiV2ProxyRequest, ILambdaContext, Task<APIGatewayHttpApiV2ProxyResponse>> handler = FunctionHandler;
+ await LambdaBootstrapBuilder.Create(handler,
+ new PowertoolsSourceGeneratorSerializer<MyCustomJsonSerializerContext>
+ (
+ new CustomLogFormatter()
+ )
+ )
+ .Build()
+ .RunAsync();
+
+ ```
+
+=== "CustomLogFormatter.cs"
+
+ ```csharp
+ public class CustomLogFormatter : ILogFormatter
+ {
+ public object FormatLogEntry(LogEntry logEntry)
+ {
+ return new
+ {
+ Message = logEntry.Message,
+ Service = logEntry.Service,
+ CorrelationIds = new
+ {
+ AwsRequestId = logEntry.LambdaContext?.AwsRequestId,
+ XRayTraceId = logEntry.XRayTraceId,
+ CorrelationId = logEntry.CorrelationId
+ },
+ LambdaFunction = new
+ {
+ Name = logEntry.LambdaContext?.FunctionName,
+ Arn = logEntry.LambdaContext?.InvokedFunctionArn,
+ MemoryLimitInMB = logEntry.LambdaContext?.MemoryLimitInMB,
+ Version = logEntry.LambdaContext?.FunctionVersion,
+ ColdStart = logEntry.ColdStart,
+ },
+ Level = logEntry.Level.ToString(),
+ Timestamp = logEntry.Timestamp.ToString("o"),
+ Logger = new
+ {
+ Name = logEntry.Name,
+ SampleRate = logEntry.SamplingRate
+ },
+ };
+ }
+ }
+ ```
+
+### Anonymous types
+
+!!! note
+
+ While we support anonymous type serialization by converting to a `Dictionary<string, object>`, this is **not** a best practice and is **not recommended** when using native AOT.
+
+ We recommend using concrete classes and adding them to your `JsonSerializerContext`.
diff --git a/docs/core/logging.md b/docs/core/logging.md
index 7c99d17a1..c3932e2a5 100644
--- a/docs/core/logging.md
+++ b/docs/core/logging.md
@@ -11,151 +11,439 @@ The logging utility provides a Lambda optimized logger with output structured as
* Log Lambda event when instructed (disabled by default)
* Log sampling enables DEBUG log level for a percentage of requests (disabled by default)
* Append additional keys to structured log at any point in time
-* Ahead-of-Time compilation to native code support [AOT](https://docs.aws.amazon.com/lambda/latest/dg/dotnet-native-aot.html) from version 1.6.0
+* Ahead-of-Time compilation to native code
+ support [AOT](https://docs.aws.amazon.com/lambda/latest/dg/dotnet-native-aot.html)
+* Custom log formatter to override default log structure
+* Support
+ for [AWS Lambda Advanced Logging Controls (ALC)](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs-advanced.html){target="_blank"}
+* Support for Microsoft.Extensions.Logging
+ and [ILogger](https://learn.microsoft.com/en-us/dotnet/api/microsoft.extensions.logging.ilogger?view=dotnet-plat-ext-7.0)
+ interface
+* Support
+ for [ILoggerFactory](https://learn.microsoft.com/en-us/dotnet/api/microsoft.extensions.logging.iloggerfactory?view=dotnet-plat-ext-7.0)
+ interface
+* Support for message templates `{}` and `{@}` for structured logging
+
+## Breaking changes from v1 (dependency updates)
+
+!!! info
+
+ Looking for V1 specific documentation, please go to [Logging v1](/lambda/dotnet/core/logging-v1)
+
+| Change | Before (v1.x) | After (v2.0) | Migration Action |
+|--------|---------------|--------------|-----------------|
+| Amazon.Lambda.Core | 2.2.0|2.5.0 | dotnet add package Amazon.Lambda.Core |
+| Amazon.Lambda.Serialization.SystemTextJson | 2.4.3 | 2.4.4 | dotnet add package Amazon.Lambda.Serialization.SystemTextJson |
+| Microsoft.Extensions.DependencyInjection | 8.0.0 | 8.0.1 | dotnet add package Microsoft.Extensions.DependencyInjection |
+
+#### Extra keys - Breaking change
+
+In v1.x, the extra keys were added to the log entry as a dictionary. In v2.x, the extra keys are added to the log entry as
+a JSON object.
+
+There is no longer a method that accepts extra keys as first argument.
+
+=== "Before (v1)"
+
+ ```csharp
+ public class User
+ {
+ public string Name { get; set; }
+ public int Age { get; set; }
+ }
+
+ Logger.LogInformation(user, "{Name} is {Age} years old",
+ new object[]{user.Name, user.Age});
+
+ var scopeKeys = new
+ {
+ PropOne = "Value 1",
+ PropTwo = "Value 2"
+ };
+ Logger.LogInformation(scopeKeys, "message");
+
+ ```
+
+=== "After (v2)"
+
+ ```csharp
+ public class User
+ {
+ public string Name { get; set; }
+ public int Age { get; set; }
+
+ public override string ToString()
+ {
+ return $"{Name} is {Age} years old";
+ }
+ }
+
+ // It uses the ToString() method of the object to log the message
+ // the extra keys are added because of the {@} in the message template
+ Logger.LogInformation("{@user}", user);
+
+ var scopeKeys = new
+ {
+ PropOne = "Value 1",
+ PropTwo = "Value 2"
+ };
+
+ // there is no longer a method that accepts extra keys as first argument.
+ Logger.LogInformation("{@keys}", scopeKeys);
+ ```
+
+This change was made to improve the performance of the logger and to make it easier to work with the extra keys.
+
## Installation
-Powertools for AWS Lambda (.NET) are available as NuGet packages. You can install the packages from [NuGet Gallery](https://www.nuget.org/packages?q=AWS+Lambda+Powertools*){target="_blank"} or from Visual Studio editor by searching `AWS.Lambda.Powertools*` to see various utilities available.
+Powertools for AWS Lambda (.NET) are available as NuGet packages. You can install the packages
+from [NuGet Gallery](https://www.nuget.org/packages?q=AWS+Lambda+Powertools*){target="_blank"} or from Visual Studio
+editor by searching `AWS.Lambda.Powertools*` to see various utilities available.
* [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages?q=AWS.Lambda.Powertools.Logging):
- `dotnet add package AWS.Lambda.Powertools.Logging`
+ `dotnet add package AWS.Lambda.Powertools.Logging`
## Getting started
!!! info
-
+
AOT Support
If loooking for AOT specific configurations navigate to the [AOT section](#aot-support)
-
Logging requires two settings:
-Setting | Description | Environment variable | Attribute parameter
-------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | -------------------------------------------------
-**Service** | Sets **Service** key that will be present across all log statements | `POWERTOOLS_SERVICE_NAME` | `Service`
-**Logging level** | Sets how verbose Logger should be (Information, by default) | `POWERTOOLS_LOG_LEVEL` | `LogLevel`
-
-### Service Property Priority Resolution
+ Setting | Description | Environment variable | Attribute parameter
+-------------------|---------------------------------------------------------------------|---------------------------|---------------------
+ **Service** | Sets **Service** key that will be present across all log statements | `POWERTOOLS_SERVICE_NAME` | `Service`
+ **Logging level** | Sets how verbose Logger should be (Information, by default) | `POWERTOOLS_LOG_LEVEL` | `LogLevel`
-The root level Service property now correctly follows this priority order:
+### Full list of environment variables
-1. LoggingAttribute.Service (property value set in the decorator)
-2. POWERTOOLS_SERVICE_NAME (environment variable)
+| Environment variable | Description | Default |
+|-----------------------------------|----------------------------------------------------------------------------------------|-----------------------|
+| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | `"service_undefined"` |
+| **POWERTOOLS_LOG_LEVEL** | Sets logging level | `Information` |
+| **POWERTOOLS_LOGGER_CASE** | Override the default casing for log keys | `SnakeCase` |
+| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | `false` |
+| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | `0` |
+### Setting up the logger
-### Example using AWS Serverless Application Model (AWS SAM)
+You can set up the logger in different ways. The most common way is to use the `Logging` attribute on your Lambda.
+You can also use the `ILogger` interface to log messages. This interface is part of the Microsoft.Extensions.Logging.
-You can override log level by setting **`POWERTOOLS_LOG_LEVEL`** environment variable in the AWS SAM template.
+=== "Using decorator"
-You can also explicitly set a service name via **`POWERTOOLS_SERVICE_NAME`** environment variable. This sets **Service** key that will be present across all log statements.
+ ```c# hl_lines="6 10"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ [Logging(Service = "payment", LogLevel = LogLevel.Debug)]
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Logger.LogInformation("Collecting payment");
+ ...
+ }
+ }
+ ```
-Here is an example using the AWS SAM [Globals section](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-template-anatomy-globals.html).
+=== "Logger Factory"
-=== "template.yaml"
+ ```c# hl_lines="6 10-17 23"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ private readonly ILogger _logger;
+
+ public Function(ILoggerFactory loggerFactory)
+ {
+ _logger = LoggerFactory.Create(builder =>
+ {
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "TestService";
+ config.LoggerOutputCase = LoggerOutputCase.PascalCase;
+ });
+ }).CreatePowertoolsLogger();
+ }
+
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ _logger.LogInformation("Collecting payment");
+ ...
+ }
+ }
+ ```
- ```yaml hl_lines="13 14"
- # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- # SPDX-License-Identifier: MIT-0
- AWSTemplateFormatVersion: "2010-09-09"
- Transform: AWS::Serverless-2016-10-31
- Description: >
- Example project for Powertools for AWS Lambda (.NET) Logging utility
+=== "With Builder"
- Globals:
- Function:
- Timeout: 10
- Environment:
- Variables:
- POWERTOOLS_SERVICE_NAME: powertools-dotnet-logging-sample
- POWERTOOLS_LOG_LEVEL: Debug
- POWERTOOLS_LOGGER_LOG_EVENT: true
- POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase
- POWERTOOLS_LOGGER_SAMPLE_RATE: 0
+ ```c# hl_lines="6 10-13 19"
+ /**
+ * Handler for requests to Lambda function.
+ */
+ public class Function
+ {
+ private readonly ILogger _logger;
+
+ public Function(ILogger logger)
+ {
+ _logger = logger ?? new PowertoolsLoggerBuilder()
+ .WithService("TestService")
+ .WithOutputCase(LoggerOutputCase.PascalCase)
+ .Build();
+ }
+
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ _logger.LogInformation("Collecting payment");
+ ...
+ }
+ }
```
-### Full list of environment variables
+### Customizing the logger
-| Environment variable | Description | Default |
-| ------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- |
-| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | `"service_undefined"` |
-| **POWERTOOLS_LOG_LEVEL** | Sets logging level | `Information` |
-| **POWERTOOLS_LOGGER_CASE** | Override the default casing for log keys | `SnakeCase` |
-| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | `false` |
-| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | `0` |
+You can customize the logger by setting the following properties in the `Logger.Configure` method:
+| Property | Description |
+|:----------------------|--------------------------------------------------------------------------------------------------|
+| `Service` | The name of the service. This is used to identify the service in the logs. |
+| `MinimumLogLevel` | The minimum log level to log. This is used to filter out logs below the specified level. |
+| `LogFormatter` | The log formatter to use. This is used to customize the structure of the log entries. |
+| `JsonOptions` | The JSON options to use. This is used to customize the serialization of logs.|
+| `LogBuffering` | The log buffering options. This is used to configure log buffering. |
+| `TimestampFormat` | The format of the timestamp. This is used to customize the format of the timestamp in the logs.|
+| `SamplingRate` | Sets a percentage (0.0 to 1.0) of logs that will be dynamically elevated to DEBUG level |
+| `LoggerOutputCase` | The output casing of the logger. This is used to customize the casing of the log entries. |
+| `LogOutput` | Specifies the console output wrapper used for writing logs. This property allows redirecting log output for testing or specialized handling scenarios. |
-### Using AWS Lambda Advanced Logging Controls (ALC)
-!!! question "When is it useful?"
- When you want to set a logging policy to drop informational or verbose logs for one or all AWS Lambda functions, regardless of runtime and logger used.
+### Configuration
-With [AWS Lambda Advanced Logging Controls (ALC)](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs.html#monitoring-cloudwatchlogs-advanced){target="_blank"}, you can enforce a minimum log level that Lambda will accept from your application code.
+You can configure Powertools Logger using the static `Logger` class. This class is a singleton and is created when the
+Lambda function is initialized. You can configure the logger using the `Logger.Configure` method.
-When enabled, you should keep `Logger` and ALC log level in sync to avoid data loss.
+=== "Configure static Logger"
-!!! warning "When using AWS Lambda Advanced Logging Controls (ALC)"
- - When Powertools Logger output is set to `PascalCase` **`Level`** property name will be replaced by **`LogLevel`** as a property name.
- - ALC takes precedence over **`POWERTOOLS_LOG_LEVEL`** and when setting it in code using **`[Logging(LogLevel = )]`**
+```c# hl_lines="5-9"
+ public class Function
+ {
+ public Function()
+ {
+ Logger.Configure(options =>
+ {
+ options.MinimumLogLevel = LogLevel.Information;
+ options.LoggerOutputCase = LoggerOutputCase.CamelCase;
+ });
+ }
-Here's a sequence diagram to demonstrate how ALC will drop both `Information` and `Debug` logs emitted from `Logger`, when ALC log level is stricter than `Logger`.
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Logger.LogInformation("Collecting payment");
+ ...
+ }
+ }
+```
-```mermaid
-sequenceDiagram
- title Lambda ALC allows WARN logs only
- participant Lambda service
- participant Lambda function
- participant Application Logger
-
- Note over Lambda service: AWS_LAMBDA_LOG_LEVEL="WARN"
- Note over Application Logger: POWERTOOLS_LOG_LEVEL="DEBUG"
- Lambda service->>Lambda function: Invoke (event)
- Lambda function->>Lambda function: Calls handler
- Lambda function->>Application Logger: Logger.Warning("Something happened")
- Lambda function-->>Application Logger: Logger.Debug("Something happened")
- Lambda function-->>Application Logger: Logger.Information("Something happened")
+### ILogger
+You can also use the `ILogger` interface to log messages. This interface is part of the Microsoft.Extensions.Logging.
+With this approach you get more flexibility and testability using dependency injection (DI).
+
+=== "Configure with LoggerFactory or Builder"
+
+ ```c# hl_lines="5-12"
+ public class Function
+ {
+ public Function(ILogger logger)
+ {
+ _logger = logger ?? LoggerFactory.Create(builder =>
+ {
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "TestService";
+ config.LoggerOutputCase = LoggerOutputCase.PascalCase;
+ });
+ }).CreatePowertoolsLogger();
+ }
- Lambda service->>Lambda service: DROP INFO and DEBUG logs
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ _logger.LogInformation("Collecting payment");
+ ...
+ }
+ }
+ ```
- Lambda service->>CloudWatch Logs: Ingest error logs
-```
+## Standard structured keys
-**Priority of log level settings in Powertools for AWS Lambda**
+Your logs will always include the following keys to your structured logging:
-We prioritise log level settings in this order:
+ Key | Type | Example | Description
+------------------------|--------|------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------
+ **Level** | string | "Information" | Logging level
+ **Message** | string | "Collecting payment" | Log statement value. Unserializable JSON values will be cast to string
+ **Timestamp** | string | "2020-05-24 18:17:33,774" | Timestamp of actual log statement
+ **Service** | string | "payment" | Service name defined. "service_undefined" will be used if unknown
+ **ColdStart** | bool | true | ColdStart value.
+ **FunctionName** | string | "example-powertools-HelloWorldFunction-1P1Z6B39FLU73"
+ **FunctionMemorySize** | string | "128"
+ **FunctionArn** | string | "arn:aws:lambda:eu-west-1:012345678910:function:example-powertools-HelloWorldFunction-1P1Z6B39FLU73"
+ **FunctionRequestId** | string | "899856cb-83d1-40d7-8611-9e78f15f32f4" | AWS Request ID from lambda context
+ **FunctionVersion** | string | "12"
+ **XRayTraceId** | string | "1-5759e988-bd862e3fe1be46a994272793" | X-Ray Trace ID when Lambda function has enabled Tracing
+ **Name** | string | "Powertools for AWS Lambda (.NET) Logger" | Logger name
+ **SamplingRate** | double | 0.1 | Debug logging sampling rate in percentage e.g. 10% in this case
+ **Customer Keys** | | |
+
+!!! Warning
+ If you emit a log message with a key that matches one of `level`, `message`, `name`, `service`, or `timestamp`, the Logger will ignore the key.
+
+## Message templates
+
+You can use message templates to extract properties from your objects and log them as structured data.
-1. AWS_LAMBDA_LOG_LEVEL environment variable
-2. Setting the log level in code using `[Logging(LogLevel = )]`
-3. POWERTOOLS_LOG_LEVEL environment variable
+!!! info
-If you set `Logger` level lower than ALC, we will emit a warning informing you that your messages will be discarded by Lambda.
+ Override the `ToString()` method of your object to return a meaningful string representation of the object.
-> **NOTE**
-> With ALC enabled, we are unable to increase the minimum log level below the `AWS_LAMBDA_LOG_LEVEL` environment variable value, see [AWS Lambda service documentation](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs.html#monitoring-cloudwatchlogs-log-level){target="_blank"} for more details.
+ This is especially important when using `{}` to log the object as a string.
-## Standard structured keys
+ ```csharp
+ public class User
+ {
+ public string FirstName { get; set; }
+ public string LastName { get; set; }
+ public int Age { get; set; }
-Your logs will always include the following keys to your structured logging:
+ public override string ToString()
+ {
+ return $"{LastName}, {FirstName} ({Age})";
+ }
+ }
+ ```
+
+If you want to log the object as a JSON object, use `{@}`. This will serialize the object and log it as a JSON object.
+
+=== "Message template {@}"
+
+ ```c# hl_lines="7-14"
+ public class Function
+ {
+ [Logging(Service = "user-service", LogLevel = LogLevel.Information)]
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ var user = new User
+ {
+ FirstName = "John",
+ LastName = "Doe",
+ Age = 42
+ };
+
+ logger.LogInformation("User object: {@user}", user);
+ ...
+ }
+ }
+ ```
+
+=== "{@} Output"
+
+ ```json hl_lines="3 8-12"
+ {
+ "level": "Information",
+ "message": "User object: Doe, John (42)",
+ "timestamp": "2025-04-07 09:06:30.708",
+ "service": "user-service",
+ "coldStart": true,
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "user": {
+ "firstName": "John",
+ "lastName": "Doe",
+ "age": 42
+ },
+ ...
+ }
+ ```
+
+If you want to log the object as a string, use `{}`. This will call the `ToString()` method of the object and log it as
+a string.
+
+=== "Message template {} ToString"
+
+ ```c# hl_lines="7-12 14 18 19"
+ public class Function
+ {
+ [Logging(Service = "user", LogLevel = LogLevel.Information)]
+ public async Task<APIGatewayProxyResponse> FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ var user = new User
+ {
+ FirstName = "John",
+ LastName = "Doe",
+ Age = 42
+ };
+
+ logger.LogInformation("User data: {user}", user);
+
+ // Also works with numbers, dates, etc.
+
+ logger.LogInformation("Price: {price:0.00}", 123.4567); // will respect decimal places
+ logger.LogInformation("Percentage: {percent:0.0%}", 0.1234);
+ ...
+ }
+ }
+ ```
+
+=== "Output {} ToString"
+
+ ```json hl_lines="3 8 12 17 21 26"
+ {
+ "level": "Information",
+ "message": "User data: Doe, John (42)",
+ "timestamp": "2025-04-07 09:06:30.689",
+ "service": "user",
+ "coldStart": true,
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "user": "Doe, John (42)"
+ }
+ {
+ "level": "Information",
+ "message": "Price: 123.46",
+ "timestamp": "2025-04-07 09:23:01.235",
+ "service": "user",
+ "cold_start": true,
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "price": 123.46
+ }
+ {
+ "level": "Information",
+ "message": "Percentage: 12.3%",
+ "timestamp": "2025-04-07 09:23:01.260",
+ "service": "user",
+ "cold_start": true,
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "percent": "12.3%"
+ }
+ ```
-Key | Type | Example | Description
-------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- | -------------------------------------------------
-**Timestamp** | string | "2020-05-24 18:17:33,774" | Timestamp of actual log statement
-**Level** | string | "Information" | Logging level
-**Name** | string | "Powertools for AWS Lambda (.NET) Logger" | Logger name
-**ColdStart** | bool | true| ColdStart value.
-**Service** | string | "payment" | Service name defined. "service_undefined" will be used if unknown
-**SamplingRate** | int | 0.1 | Debug logging sampling rate in percentage e.g. 10% in this case
-**Message** | string | "Collecting payment" | Log statement value. Unserializable JSON values will be cast to string
-**FunctionName**| string | "example-powertools-HelloWorldFunction-1P1Z6B39FLU73"
-**FunctionVersion**| string | "12"
-**FunctionMemorySize**| string | "128"
-**FunctionArn**| string | "arn:aws:lambda:eu-west-1:012345678910:function:example-powertools-HelloWorldFunction-1P1Z6B39FLU73"
-**XRayTraceId**| string | "1-5759e988-bd862e3fe1be46a994272793" | X-Ray Trace ID when Lambda function has enabled Tracing
-**FunctionRequestId**| string | "899856cb-83d1-40d7-8611-9e78f15f32f4" | AWS Request ID from lambda context
## Logging incoming event
-When debugging in non-production environments, you can instruct Logger to log the incoming event with `LogEvent` parameter or via `POWERTOOLS_LOGGER_LOG_EVENT` environment variable.
+When debugging in non-production environments, you can instruct Logger to log the incoming event with `LogEvent`
+parameter or via `POWERTOOLS_LOGGER_LOG_EVENT` environment variable.
!!! warning
Log event is disabled by default to prevent sensitive info being logged.
@@ -179,11 +467,12 @@ When debugging in non-production environments, you can instruct Logger to log th
## Setting a Correlation ID
-You can set a Correlation ID using `CorrelationIdPath` parameter by passing a [JSON Pointer expression](https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-03){target="_blank"}.
+You can set a Correlation ID using `CorrelationIdPath` parameter by passing
+a [JSON Pointer expression](https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-03){target="_blank"}.
!!! Attention
- The JSON Pointer expression is `case sensitive`. In the bellow example `/headers/my_request_id_header` would work but `/Headers/my_request_id_header` would not find the element.
-
+ The JSON Pointer expression is `case sensitive`. In the below example `/headers/my_request_id_header` would work but
+ `/Headers/my_request_id_header` would not find the element.
=== "Function.cs"
@@ -201,6 +490,7 @@ You can set a Correlation ID using `CorrelationIdPath` parameter by passing a [J
}
}
```
+
=== "Example Event"
```json hl_lines="3"
@@ -215,23 +505,25 @@ You can set a Correlation ID using `CorrelationIdPath` parameter by passing a [J
```json hl_lines="15"
{
+ "level": "Information",
+ "message": "Collecting payment",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "service": "lambda-example",
"cold_start": true,
- "xray_trace_id": "1-61b7add4-66532bb81441e1b060389429",
"function_name": "test",
- "function_version": "$LATEST",
"function_memory_size": 128,
"function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
"function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
- "timestamp": "2021-12-13T20:32:22.5774262Z",
- "level": "Information",
- "service": "lambda-example",
+ "function_version": "$LATEST",
+ "xray_trace_id": "1-61b7add4-66532bb81441e1b060389429",
"name": "AWS.Lambda.Powertools.Logging.Logger",
- "message": "Collecting payment",
"sampling_rate": 0.7,
"correlation_id": "correlation_id_value",
}
```
-We provide [built-in JSON Pointer expression](https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-03){target="_blank"}
+
+We provide [built-in JSON Pointer expression](https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-03){target="_blank"}
for known event sources, where either a request ID or X-Ray Trace ID are present.
=== "Function.cs"
@@ -265,18 +557,18 @@ for known event sources, where either a request ID or X-Ray Trace ID are present
```json hl_lines="15"
{
+ "level": "Information",
+ "message": "Collecting payment",
+ "timestamp": "2021-12-13T20:32:22.5774262Z",
+ "service": "lambda-example",
"cold_start": true,
- "xray_trace_id": "1-61b7add4-66532bb81441e1b060389429",
"function_name": "test",
- "function_version": "$LATEST",
"function_memory_size": 128,
"function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
"function_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
- "timestamp": "2021-12-13T20:32:22.5774262Z",
- "level": "Information",
- "service": "lambda-example",
+ "function_version": "$LATEST",
+ "xray_trace_id": "1-61b7add4-66532bb81441e1b060389429",
"name": "AWS.Lambda.Powertools.Logging.Logger",
- "message": "Collecting payment",
"sampling_rate": 0.7,
"correlation_id": "correlation_id_value",
}
@@ -285,9 +577,13 @@ for known event sources, where either a request ID or X-Ray Trace ID are present
## Appending additional keys
!!! info "Custom keys are persisted across warm invocations"
- Always set additional keys as part of your handler to ensure they have the latest value, or explicitly clear them with [`ClearState=true`](#clearing-all-state).
+ Always set additional keys as part of your handler to ensure they have the latest value, or explicitly clear them with [`ClearState=true`](#clearing-all-state).
-You can append your own keys to your existing logs via `AppendKey`. Typically this value would be passed into the function via the event. Appended keys are added to all subsequent log entries in the current execution from the point the logger method is called. To ensure the key is added to all log entries, call this method as early as possible in the Lambda handler.
+You can append your own keys to your existing logs via `AppendKey`. Typically this value would be passed into the
+function via the event. Appended keys are added to all subsequent log entries in the current execution from the point
+the logger method is called. To ensure the key is added to all log entries, call this method as early as possible in the
+Lambda handler.
=== "Function.cs"
@@ -318,25 +614,26 @@ You can append your own keys to your existing logs via `AppendKey`. Typically th
}
```
+
=== "Example CloudWatch Logs excerpt"
```json hl_lines="4 5 6"
{
+ "level": "Information",
+ "message": "Getting ip address from external service",
+ "timestamp": "2022-03-14T07:25:20.9418065Z",
+ "service": "powertools-dotnet-logging-sample",
"cold_start": false,
- "xray_trace_id": "1-622eede0-647960c56a91f3b071a9fff1",
- "lookup_info": {
- "lookup_id": "4c50eace-8b1e-43d3-92ba-0efacf5d1625"
- },
"function_name": "PowertoolsLoggingSample-HelloWorldFunction-hm1r10VT3lCy",
- "function_version": "$LATEST",
"function_memory_size": 256,
- "function_arn": "arn:aws:lambda:ap-southeast-2:538510314095:function:PowertoolsLoggingSample-HelloWorldFunction-hm1r10VT3lCy",
+ "function_arn": "arn:aws:lambda:function:PowertoolsLoggingSample-HelloWorldFunction-hm1r10VT3lCy",
"function_request_id": "96570b2c-f00e-471c-94ad-b25e95ba7347",
- "timestamp": "2022-03-14T07:25:20.9418065Z",
- "level": "Information",
- "service": "powertools-dotnet-logging-sample",
+ "function_version": "$LATEST",
+ "xray_trace_id": "1-622eede0-647960c56a91f3b071a9fff1",
"name": "AWS.Lambda.Powertools.Logging.Logger",
- "message": "Getting ip address from external service"
+ "lookup_info": {
+ "lookup_id": "4c50eace-8b1e-43d3-92ba-0efacf5d1625"
+ },
}
```
@@ -376,14 +673,17 @@ You can remove any additional key from entry using `Logger.RemoveKeys()`.
## Extra Keys
-Extra keys allow you to append additional keys to a log entry. Unlike `AppendKey`, extra keys will only apply to the current log entry.
+Extra keys allow you to append additional keys to a log entry. Unlike `AppendKey`, extra keys will only apply to the
+current log entry.
-Extra keys argument is available for all log levels' methods, as implemented in the standard logging library - e.g. Logger.Information, Logger.Warning.
+Extra keys argument is available for all log levels' methods, as implemented in the standard logging library - e.g.
+Logger.Information, Logger.Warning.
-It accepts any dictionary, and all keyword arguments will be added as part of the root structure of the logs for that log statement.
+It accepts any dictionary, and all keyword arguments will be added as part of the root structure of the logs for that
+log statement.
!!! info
- Any keyword argument added using extra keys will not be persisted for subsequent messages.
+ Any keyword argument added using extra keys will not be persisted for subsequent messages.
=== "Function.cs"
@@ -414,7 +714,10 @@ It accepts any dictionary, and all keyword arguments will be added as part of th
### Clearing all state
-Logger is commonly initialized in the global scope. Due to [Lambda Execution Context reuse](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), this means that custom keys can be persisted across invocations. If you want all custom keys to be deleted, you can use `ClearState=true` attribute on `[Logging]` attribute.
+Logger is commonly initialized in the global scope. Due
+to [Lambda Execution Context reuse](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), this means that
+custom keys can be persisted across invocations. If you want all custom keys to be deleted, you can use
+`ClearState=true` attribute on `[Logging]` attribute.
=== "Function.cs"
@@ -439,6 +742,7 @@ Logger is commonly initialized in the global scope. Due to [Lambda Execution Con
}
}
```
+
=== "#1 Request"
```json hl_lines="11"
@@ -478,7 +782,8 @@ You can dynamically set a percentage of your logs to **DEBUG** level via env var
via `SamplingRate` parameter on attribute.
!!! info
- Configuration on environment variable is given precedence over sampling rate configuration on attribute, provided it's in valid value range.
+ Configuration on environment variable is given precedence over sampling rate configuration on attribute, provided it's
+ in valid value range.
=== "Sampling via attribute parameter"
@@ -513,9 +818,13 @@ via `SamplingRate` parameter on attribute.
## Configure Log Output Casing
-By definition Powertools for AWS Lambda (.NET) outputs logging keys using **snake case** (e.g. *"function_memory_size": 128*). This allows developers using different Powertools for AWS Lambda (.NET) runtimes, to search logs across services written in languages such as Python or TypeScript.
+By definition Powertools for AWS Lambda (.NET) outputs logging keys using **snake case** (e.g. *"function_memory_size":
+128*). This allows developers using different Powertools for AWS Lambda (.NET) runtimes, to search logs across services
+written in languages such as Python or TypeScript.
-If you want to override the default behavior you can either set the desired casing through attributes, as described in the example below, or by setting the `POWERTOOLS_LOGGER_CASE` environment variable on your AWS Lambda function. Allowed values are: `CamelCase`, `PascalCase` and `SnakeCase`.
+If you want to override the default behavior you can either set the desired casing through attributes, as described in
+the example below, or by setting the `POWERTOOLS_LOGGER_CASE` environment variable on your AWS Lambda function. Allowed
+values are: `CamelCase`, `PascalCase` and `SnakeCase`.
=== "Output casing via attribute parameter"
@@ -584,9 +893,132 @@ Below are some output examples for different casing.
}
```
-## Custom Log formatter (Bring Your Own Formatter)
-You can customize the structure (keys and values) of your log entries by implementing a custom log formatter and override default log formatter using ``Logger.UseFormatter`` method. You can implement a custom log formatter by inheriting the ``ILogFormatter`` class and implementing the ``object FormatLogEntry(LogEntry logEntry)`` method.
+## Advanced
+
+### Log Levels
+
+The default log level is `Information` and can be set using the `MinimumLogLevel` property option or by using the `POWERTOOLS_LOG_LEVEL` environment variable.
+
+We support the following log levels:
+
+| Level | Numeric value | Lambda Level |
+|---------------|---------------|--------------|
+| `Trace` | 0 | `trace` |
+| `Debug` | 1 | `debug` |
+| `Information` | 2 | `info` |
+| `Warning` | 3 | `warn` |
+| `Error` | 4 | `error` |
+| `Critical` | 5 | `fatal` |
+| `None` | 6 | |
+
+### Using AWS Lambda Advanced Logging Controls (ALC)
+
+!!! question "When is it useful?"
+ When you want to set a logging policy to drop informational or verbose logs for one or all AWS Lambda functions,
+ regardless of runtime and logger used.
+
+With [AWS Lambda Advanced Logging Controls (ALC)](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs.html#monitoring-cloudwatchlogs-advanced){target="_blank"}, you can enforce a minimum log level that Lambda will accept from your application code.
+
+When enabled, you should keep `Logger` and ALC log level in sync to avoid data loss.
+
+!!! warning "When using AWS Lambda Advanced Logging Controls (ALC)"
+ - When Powertools Logger output is set to `PascalCase` **`Level`** property name will be replaced by **`LogLevel`** as
+ a property name.
+ - ALC takes precedence over **`POWERTOOLS_LOG_LEVEL`** and when setting it in code using **`[Logging(LogLevel = )]`**
+
+Here's a sequence diagram to demonstrate how ALC will drop both `Information` and `Debug` logs emitted from `Logger`,
+when ALC log level is stricter than `Logger`.
+
+```mermaid
+sequenceDiagram
+ title Lambda ALC allows WARN logs only
+ participant Lambda service
+ participant Lambda function
+ participant Application Logger
+
+ Note over Lambda service: AWS_LAMBDA_LOG_LEVEL="WARN"
+ Note over Application Logger: POWERTOOLS_LOG_LEVEL="DEBUG"
+ Lambda service->>Lambda function: Invoke (event)
+ Lambda function->>Lambda function: Calls handler
+ Lambda function->>Application Logger: Logger.Warning("Something happened")
+ Lambda function-->>Application Logger: Logger.Debug("Something happened")
+ Lambda function-->>Application Logger: Logger.Information("Something happened")
+
+ Lambda service->>Lambda service: DROP INFO and DEBUG logs
+
+ Lambda service->>CloudWatch Logs: Ingest error logs
+```
+
+**Priority of log level settings in Powertools for AWS Lambda**
+
+We prioritise log level settings in this order:
+
+1. AWS_LAMBDA_LOG_LEVEL environment variable
+2. Setting the log level in code using `[Logging(LogLevel = )]`
+3. POWERTOOLS_LOG_LEVEL environment variable
+
+If you set `Logger` level lower than ALC, we will emit a warning informing you that your messages will be discarded by
+Lambda.
+
+> **NOTE**
+> With ALC enabled, we are unable to increase the minimum log level below the `AWS_LAMBDA_LOG_LEVEL` environment
+> variable value,
+> see [AWS Lambda service documentation](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-cloudwatchlogs.html#monitoring-cloudwatchlogs-log-level){target="_blank"} for more details.
+
+### Using JsonSerializerOptions
+
+Powertools supports customizing the serialization and deserialization of Lambda JSON events and your own types using
+`JsonSerializerOptions`.
+You can do this by creating a custom `JsonSerializerOptions` and passing it to the `JsonOptions` of the Powertools
+Logger.
+
+Supports `TypeInfoResolver` and `DictionaryKeyPolicy` options. These two options are the most common ones used to
+customize the serialization of Powertools Logger.
+
+- `TypeInfoResolver`: This option allows you to specify a custom `JsonSerializerContext` that contains the types you
+ want to serialize and deserialize. This is especially useful when using AOT compilation, as it allows you to specify
+ the types that should be included in the generated assembly.
+- `DictionaryKeyPolicy`: This option allows you to specify a custom naming policy for the properties in the JSON output.
+ This is useful when you want to change the casing of the property names or use a different naming convention.
+
+!!! info
+ If you want to preserve the original casing of the property names (keys), you can set the `DictionaryKeyPolicy` to
+ `null`.
+
+```csharp
+builder.Logging.AddPowertoolsLogger(options =>
+{
+ options.JsonOptions = new JsonSerializerOptions
+ {
+ DictionaryKeyPolicy = JsonNamingPolicy.CamelCase, // Override output casing
+ TypeInfoResolver = MyCustomJsonSerializerContext.Default // Your custom JsonSerializerContext
+ };
+});
+```
+
+!!! warning
+ When using `builder.Logging.AddPowertoolsLogger` method it will use any already configured logging providers (file loggers, database loggers, third-party providers).
+
+ If you want to use Powertools Logger as the only logging provider, you should call `builder.Logging.ClearProviders()` before adding Powertools Logger or the new method override
+
+ ```csharp
+ builder.Logging.AddPowertoolsLogger(config =>
+ {
+ config.Service = "TestService";
+ config.LoggerOutputCase = LoggerOutputCase.PascalCase;
+ }, clearExistingProviders: true);
+ ```
+
+### Custom Log formatter (Bring Your Own Formatter)
+
+You can customize the structure (keys and values) of your log entries by implementing a custom log formatter and
+override default log formatter using ``LogFormatter`` property in the `configure` options.
+
+You can implement a custom log formatter by
+inheriting the ``ILogFormatter`` class and implementing the ``object FormatLogEntry(LogEntry logEntry)`` method.
=== "Function.cs"
@@ -601,7 +1033,10 @@ You can customize the structure (keys and values) of your log entries by impleme
///
public Function()
{
- Logger.UseFormatter(new CustomLogFormatter());
+ Logger.Configure(options =>
+ {
+ options.LogFormatter = new CustomLogFormatter();
+ });
}
[Logging(CorrelationIdPath = "/headers/my_request_id_header", SamplingRate = 0.7)]
@@ -612,6 +1047,7 @@ You can customize the structure (keys and values) of your log entries by impleme
}
}
```
+
=== "CustomLogFormatter.cs"
```c#
@@ -676,21 +1112,332 @@ You can customize the structure (keys and values) of your log entries by impleme
}
```
+### Buffering logs
+
+Log buffering enables you to buffer logs for a specific request or invocation. Enable log buffering by passing `LogBufferingOptions` when configuring a Logger instance. You can buffer logs at the `Warning`, `Information`, `Debug` or `Trace` level, and flush them automatically on error or manually as needed.
+
+!!! tip "This is useful when you want to reduce the number of log messages emitted while still having detailed logs when needed, such as when troubleshooting issues."
+
+=== "LogBufferingOptions"
+
+ ```csharp hl_lines="5-14"
+ public class Function
+ {
+ public Function()
+ {
+ Logger.Configure(logger =>
+ {
+ logger.Service = "MyServiceName";
+ logger.LogBuffering = new LogBufferingOptions
+ {
+ BufferAtLogLevel = LogLevel.Debug,
+ MaxBytes = 20480, // Default is 20KB (20480 bytes)
+ FlushOnErrorLog = true // default true
+ };
+ });
+
+ Logger.LogDebug("This is a debug message"); // This is NOT buffered
+ }
+
+ [Logging]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Logger.LogDebug("This is a debug message"); // This is buffered
+ Logger.LogInformation("This is an info message");
+
+ // your business logic here
+
+ Logger.LogError("This is an error message"); // This also flushes the buffer
+ }
+ }
+
+ ```
+
+#### Configuring the buffer
+
+When configuring the buffer, you can set the following options to fine-tune how logs are captured, stored, and emitted. The following options are available on the `LogBufferingOptions` instance:
+
+| Parameter | Description | Configuration | Default |
+|---------------------|------------------------------------------------- |--------------------------------------------|---------|
+| `MaxBytes` | Maximum size of the log buffer in bytes | `number` | `20480` |
+| `BufferAtLogLevel` | Minimum log level to buffer | `Trace`, `Debug`, `Information`, `Warning` | `Debug` |
+| `FlushOnErrorLog` | Automatically flush buffer when logging an error | `True`, `False` | `True` |
+
+=== "BufferAtLogLevel"
+
+ ```csharp hl_lines="10"
+ public class Function
+ {
+ public Function()
+ {
+ Logger.Configure(logger =>
+ {
+ logger.Service = "MyServiceName";
+ logger.LogBuffering = new LogBufferingOptions
+ {
+ BufferAtLogLevel = LogLevel.Warning
+ };
+ });
+ }
+
+ [Logging]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ // All logs below are buffered
+ Logger.LogDebug("This is a debug message");
+ Logger.LogInformation("This is an info message");
+ Logger.LogWarning("This is a warn message");
+
+ Logger.ClearBuffer(); // This will clear the buffer without emitting the logs
+ }
+ }
+ ```
+
+ 1. Setting `BufferAtLogLevel = LogLevel.Warning` configures log buffering for `Warning` and all lower severity levels like `Information`, `Debug`, and `Trace`.
+ 2. Calling `Logger.ClearBuffer()` will clear the buffer without emitting the logs.
+
+=== "FlushOnErrorLog"
+
+ ```csharp hl_lines="10"
+ public class Function
+ {
+ public Function()
+ {
+ Logger.Configure(logger =>
+ {
+ logger.Service = "MyServiceName";
+ logger.LogBuffering = new LogBufferingOptions
+ {
+ FlushOnErrorLog = false
+ };
+ });
+ }
+
+ [Logging]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Logger.LogDebug("This is a debug message"); // this is buffered
+
+ try
+ {
+ throw new Exception();
+ }
+ catch (Exception e)
+ {
+ Logger.LogError(e.Message); // this does NOT flush the buffer
+ }
+
+ Logger.LogDebug("Debug!!"); // this is buffered
+
+ try
+ {
+ throw new Exception();
+ }
+ catch (Exception e)
+ {
+ Logger.LogError(e.Message); // this does NOT flush the buffer
+ Logger.FlushBuffer(); // Manually flush
+ }
+ }
+ }
+ ```
+
+ 1. Disabling `FlushOnErrorLog` will not flush the buffer when logging an error. This is useful when you want to control when the buffer is flushed by calling the `Logger.FlushBuffer()` method.
+
+#### Flushing on errors
+
+When using the `[Logging]` attribute, you can configure the logger to automatically flush the buffer when an uncaught error occurs. This is done by setting the `FlushBufferOnUncaughtError` option to `true` on the attribute.
+
+=== "FlushBufferOnUncaughtError"
+
+ ```csharp hl_lines="15"
+ public class Function
+ {
+ public Function()
+ {
+ Logger.Configure(logger =>
+ {
+ logger.Service = "MyServiceName";
+ logger.LogBuffering = new LogBufferingOptions
+ {
+ BufferAtLogLevel = LogLevel.Debug
+ };
+ });
+ }
+
+ [Logging(FlushBufferOnUncaughtError = true)]
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Logger.LogDebug("This is a debug message");
+
+ throw new Exception(); // This causes the buffer to be flushed
+ }
+ }
+ ```
+
+#### Buffering workflows
+
+##### Manual flush
+
+
+
+##### Flushing on error
+
+This works only when using the `[Logging]` attribute. You can configure the logger to automatically flush the buffer when an error occurs by setting the `FlushBufferOnUncaughtError` option to `true` on the attribute.
+
+
+
+#### Buffering FAQs
+
+1. **Does the buffer persist across Lambda invocations?**
+ No, each Lambda invocation has its own buffer. The buffer is initialized when the Lambda function is invoked and is cleared after the function execution completes or when flushed manually.
+
+2. **Are my logs buffered during cold starts?**
+ No, we never buffer logs during cold starts. This is because we want to ensure that logs emitted during this phase are always available for debugging and monitoring purposes. The buffer is only used during the execution of the Lambda function.
+
+3. **How can I prevent log buffering from consuming excessive memory?**
+ You can limit the size of the buffer by setting the `MaxBytes` option in the `LogBufferingOptions` constructor parameter. This will ensure that the buffer does not grow indefinitely and consume excessive memory.
+
+4. **What happens if the log buffer reaches its maximum size?**
+ Older logs are removed from the buffer to make room for new logs. This means that if the buffer is full, you may lose some logs if they are not flushed before the buffer reaches its maximum size. When this happens, we emit a warning when flushing the buffer to indicate that some logs have been dropped.
+
+5. **How is the log size of a log line calculated?**
+ The log size is calculated based on the size of the serialized log line in bytes. This includes the size of the log message, the size of any additional keys, and the size of the timestamp.
+
+6. **What timestamp is used when I flush the logs?**
+ The timestamp preserves the original time when the log record was created. If you create a log record at 11:00:10 and flush it at 11:00:25, the log line will retain its original timestamp of 11:00:10.
+
+7. **What happens if I try to add a log line that is bigger than max buffer size?**
+ The log will be emitted directly to standard output and not buffered. When this happens, we emit a warning to indicate that the log line was too big to be buffered.
+
+8. **What happens if Lambda times out without flushing the buffer?**
+ Logs that are still in the buffer will be lost. If you are using the log buffer to log asynchronously, you should ensure that the buffer is flushed before the Lambda function times out. You can do this by calling the `Logger.FlushBuffer()` method at the end of your Lambda function.
+
+### Timestamp formatting
+
+You can customize the timestamp format by setting the `TimestampFormat` property in the `Logger.Configure` method. The default format is `o`, which is the ISO 8601 format.
+You can use any valid [DateTime format string](https://docs.microsoft.com/en-us/dotnet/standard/base-types/custom-date-and-time-format-strings) to customize the timestamp format.
+For example, to use the `yyyy-MM-dd HH:mm:ss` format, you can do the following:
+
+```csharp
+Logger.Configure(logger =>
+{
+ logger.TimestampFormat = "yyyy-MM-dd HH:mm:ss";
+});
+```
+This will output the timestamp in the following format:
+
+```json
+{
+ "level": "Information",
+ "message": "Test Message",
+ "timestamp": "2021-12-13 20:32:22",
+ "service": "lambda-example",
+ ...
+}
+```
+
## AOT Support
!!! info
-
- If you want to use the `LogEvent`, `Custom Log Formatter` features, or serialize your own types when Logging events, you need to make changes in your Lambda `Main` method.
+
+ If you want to use the `LogEvent`, `Custom Log Formatter` features, or serialize your own types when Logging events, you need to either pass `JsonSerializerContext` or make changes in your Lambda `Main` method.
!!! info
Starting from version 1.6.0, it is required to update the Amazon.Lambda.Serialization.SystemTextJson NuGet package to version 2.4.3 in your csproj.
-### Configure
+### Using JsonSerializerOptions
+
+To be able to serialize your own types, you need to pass your `JsonSerializerContext` to the `TypeInfoResolver` of the `JsonOptions` in the `Logger.Configure` method.
+
+```csharp
+Logger.Configure(logger =>
+{
+ logger.JsonOptions = new JsonSerializerOptions
+ {
+ TypeInfoResolver = YourJsonSerializerContext.Default
+ };
+});
+```
+
+### Using PowertoolsSourceGeneratorSerializer
Replace `SourceGeneratorLambdaJsonSerializer` with `PowertoolsSourceGeneratorSerializer`.
-This change enables Powertools to construct an instance of `JsonSerializerOptions` used to customize the serialization and deserialization of Lambda JSON events and your own types.
+This change enables Powertools to construct an instance of `JsonSerializerOptions` used to customize the serialization
+and deserialization of Lambda JSON events and your own types.
=== "Before"
@@ -710,7 +1457,7 @@ This change enables Powertools to construct an instance of `JsonSerializerOption
.RunAsync();
```
-For example when you have your own Demo type
+For example when you have your own Demo type
```csharp
public class Demo
@@ -731,11 +1478,14 @@ public partial class MyCustomJsonSerializerContext : JsonSerializerContext
}
```
-When you update your code to use `PowertoolsSourceGeneratorSerializer`, we combine your `JsonSerializerContext` with Powertools' `JsonSerializerContext`. This allows Powertools to serialize your types and Lambda events.
+When you update your code to use `PowertoolsSourceGeneratorSerializer`, we combine your
+`JsonSerializerContext` with Powertools' `JsonSerializerContext`. This allows Powertools to serialize your types and
+Lambda events.
### Custom Log Formatter
-To use a custom log formatter with AOT, pass an instance of `ILogFormatter` to `PowertoolsSourceGeneratorSerializer` instead of using the static `Logger.UseFormatter` in the Function constructor as you do in non-AOT Lambdas.
+To use a custom log formatter with AOT, pass an instance of `ILogFormatter` to `PowertoolsSourceGeneratorSerializer`
+instead of using the static `Logger.UseFormatter` in the Function constructor as you do in non-AOT Lambdas.
=== "Function Main method"
@@ -797,3 +1547,99 @@ To use a custom log formatter with AOT, pass an instance of `ILogFormatter` to `
While we support anonymous type serialization by converting to a `Dictionary`, this is **not** a best practice and is **not recommended** when using native AOT.
We recommend using concrete classes and adding them to your `JsonSerializerContext`.
+
+## Testing
+
+You can change where the `Logger` will output its logs by setting the `LogOutput` property.
+We also provide a helper class for tests, `TestLoggerOutput`, or you can provide your own implementation of `IConsoleWrapper`.
+
+```csharp
+Logger.Configure(options =>
+{
+ // Using TestLoggerOutput
+ options.LogOutput = new TestLoggerOutput();
+ // Custom console output for testing
+ options.LogOutput = new TestConsoleWrapper();
+});
+
+// Example implementation for testing:
+public class TestConsoleWrapper : IConsoleWrapper
+{
+ public List<string> CapturedOutput { get; } = new();
+
+ public void WriteLine(string message)
+ {
+ CapturedOutput.Add(message);
+ }
+}
+```
+```csharp
+// Test example
+[Fact]
+public void When_Setting_Service_Should_Update_Key()
+{
+ // Arrange
+ var consoleOut = new TestLoggerOutput();
+ Logger.Configure(options =>
+ {
+ options.LogOutput = consoleOut;
+ });
+
+ // Act
+ _testHandlers.HandlerService();
+
+ // Assert
+
+ var st = consoleOut.ToString();
+
+ Assert.Contains("\"level\":\"Information\"", st);
+ Assert.Contains("\"service\":\"test\"", st);
+ Assert.Contains("\"name\":\"AWS.Lambda.Powertools.Logging.Logger\"", st);
+ Assert.Contains("\"message\":\"test\"", st);
+}
+```
+
+### ILogger
+
+If you are using the `ILogger` interface, you can inject the logger through a dedicated constructor for your Lambda function, which lets you mock your `ILogger` instance.
+
+```csharp
+public class Function
+{
+ private readonly ILogger _logger;
+
+ public Function()
+ {
+ _logger = LoggerFactory.Create(builder =>
+ {
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "TestService";
+ config.LoggerOutputCase = LoggerOutputCase.PascalCase;
+ });
+ }).CreatePowertoolsLogger();
+ }
+
+ // constructor used for tests - pass the mock ILogger
+ public Function(ILogger logger)
+ {
+ _logger = logger ?? LoggerFactory.Create(builder =>
+ {
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "TestService";
+ config.LoggerOutputCase = LoggerOutputCase.PascalCase;
+ });
+ }).CreatePowertoolsLogger();
+ }
+
+ public async Task FunctionHandler
+ (APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ _logger.LogInformation("Collecting payment");
+ ...
+ }
+}
+```
+
+
diff --git a/docs/core/metrics-v1.md b/docs/core/metrics-v1.md
new file mode 100644
index 000000000..7ed992637
--- /dev/null
+++ b/docs/core/metrics-v1.md
@@ -0,0 +1,416 @@
+---
+title: Metrics v1 - Legacy
+description: Core utility
+---
+
+!!! warning
+ Version 1.x.x will continue to be supported **until end of October 2025** for bug fixes and security updates, but no new features will be added to this version. We recommend you upgrade to the latest version.
+
+ The latest version is available at [Metrics v2](https://docs.powertools.aws.dev/lambda/dotnet/core/metrics-v2/).
+
+
+Metrics creates custom metrics asynchronously by logging metrics to standard output following [Amazon CloudWatch Embedded Metric Format (EMF)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format.html).
+
+These metrics can be visualized through [Amazon CloudWatch Console](https://aws.amazon.com/cloudwatch/).
+
+## Key features
+
+* Aggregate up to 100 metrics using a single [CloudWatch EMF](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html){target="_blank"} object (large JSON blob)
+* Validating your metrics against common metric definitions mistakes (for example, metric unit, values, max dimensions, max metrics)
+* Metrics are created asynchronously by the CloudWatch service. You do not need any custom stacks, and there is no impact to Lambda function latency
+* Context manager to create a one off metric with a different dimension
+* Ahead-of-Time compilation to native code support [AOT](https://docs.aws.amazon.com/lambda/latest/dg/dotnet-native-aot.html) from version 1.7.0
+
+
+
+
+
+ Metrics showcase - Metrics Explorer
+
+
+## Installation
+
+Powertools for AWS Lambda (.NET) are available as NuGet packages. You can install the packages from [NuGet Gallery](https://www.nuget.org/packages?q=AWS+Lambda+Powertools*){target="_blank"} or from Visual Studio editor by searching `AWS.Lambda.Powertools*` to see various utilities available.
+
+* [AWS.Lambda.Powertools.Metrics](https://www.nuget.org/packages?q=AWS.Lambda.Powertools.Metrics):
+
+ `dotnet add package AWS.Lambda.Powertools.Metrics -v 1.7.1`
+
+## Terminologies
+
+If you're new to Amazon CloudWatch, there are two terminologies you must be aware of before using this utility:
+
+* **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`.
+* **Dimensions**. Metrics metadata in key-value format. They help you slice and dice metrics visualization, for example `ColdStart` metric by Payment `service`.
+* **Metric**. It's the name of the metric, for example: SuccessfulBooking or UpdatedBooking.
+* **Unit**. It's a value representing the unit of measure for the corresponding metric, for example: Count or Seconds.
+* **Resolution**. It's a value representing the storage resolution for the corresponding metric. Metrics can be either Standard or High resolution. Read more [here](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#Resolution_definition).
+
+Visit the AWS documentation for a complete explanation for [Amazon CloudWatch concepts](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html).
+
+
+
+ Metric terminology, visually explained
+
+
+## Getting started
+
+**`Metrics`** is implemented as a Singleton to keep track of your aggregate metrics in memory and make them accessible anywhere in your code. To guarantee that metrics are flushed properly the **`MetricsAttribute`** must be added on the lambda handler.
+
+Metrics has two global settings that will be used across all metrics emitted. Use your application or main service as the metric namespace to easily group all metrics:
+
+Setting | Description | Environment variable | Constructor parameter
+------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | -------------------------------------------------
+**Service** | Optionally, sets **service** metric dimension across all metrics e.g. `payment` | `POWERTOOLS_SERVICE_NAME` | `Service`
+**Metric namespace** | Logical container where all metrics will be placed e.g. `MyCompanyEcommerce` | `POWERTOOLS_METRICS_NAMESPACE` | `Namespace`
+
+!!! info "Autocomplete Metric Units"
+ All parameters in **`Metrics Attribute`** are optional. Following rules apply:
+
+ - **Namespace:** **`Empty`** string by default. You can either specify it in code or environment variable. If not present before flushing metrics, a **`SchemaValidationException`** will be thrown.
+ - **Service:** **`service_undefined`** by default. You can either specify it in code or environment variable.
+ - **CaptureColdStart:** **`false`** by default.
+ - **RaiseOnEmptyMetrics:** **`false`** by default.
+
+### Example using AWS Serverless Application Model (AWS SAM)
+
+=== "template.yml"
+
+ ```yaml hl_lines="9 10"
+ Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ ...
+ Environment:
+ Variables:
+ POWERTOOLS_SERVICE_NAME: ShoppingCartService
+ POWERTOOLS_METRICS_NAMESPACE: MyCompanyEcommerce
+ ```
+
+=== "Function.cs"
+
+ ```csharp hl_lines="4"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+ [Metrics(Namespace = "MyCompanyEcommerce", Service = "ShoppingCartService", CaptureColdStart = true, RaiseOnEmptyMetrics = true)]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ }
+ }
+ ```
+
+### Full list of environment variables
+
+| Environment variable | Description | Default |
+| ------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- |
+| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | `"service_undefined"` |
+| **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | `None` |
+
+### Creating metrics
+
+You can create metrics using **`AddMetric`**, and you can create dimensions for all your aggregate metrics using **`AddDimension`** method.
+
+=== "Metrics"
+
+ ```csharp hl_lines="5 8"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+ }
+ ```
+=== "Metrics with custom dimensions"
+
+ ```csharp hl_lines="8-9"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddDimension("Environment","Prod");
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+ }
+ ```
+
+!!! tip "Autocomplete Metric Units"
+ `MetricUnit` enum facilitates finding a supported metric unit by CloudWatch.
+
+!!! note "Metrics overflow"
+ CloudWatch EMF supports a max of 100 metrics per batch. Metrics utility will flush all metrics when adding the 100th metric. Subsequent metrics, e.g. the 101st, will be aggregated into a new EMF object, for your convenience.
+
+!!! warning "Metric value must be a positive number"
+ Metric values must be a positive number otherwise an `ArgumentException` will be thrown.
+
+!!! warning "Do not create metrics or dimensions outside the handler"
+ Metrics or dimensions added in the global scope will only be added during cold start. Disregard if that's the intended behavior.
+
+### Adding high-resolution metrics
+
+You can create [high-resolution metrics](https://aws.amazon.com/about-aws/whats-new/2023/02/amazon-cloudwatch-high-resolution-metric-extraction-structured-logs/) passing `MetricResolution` as parameter to `AddMetric`.
+
+!!! tip "When is it useful?"
+ High-resolution metrics are data with a granularity of one second and are very useful in several situations such as telemetry, time series, real-time incident management, and others.
+
+=== "Metrics with high resolution"
+
+ ```csharp hl_lines="9 12 15"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ // Publish a metric with standard resolution i.e. StorageResolution = 60
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count, MetricResolution.Standard);
+
+ // Publish a metric with high resolution i.e. StorageResolution = 1
+ Metrics.AddMetric("FailedBooking", 1, MetricUnit.Count, MetricResolution.High);
+
+ // The last parameter (storage resolution) is optional
+ Metrics.AddMetric("SuccessfulUpgrade", 1, MetricUnit.Count);
+ }
+ }
+ ```
+
+!!! tip "Autocomplete Metric Resolutions"
+ Use the `MetricResolution` enum to easily find a supported metric resolution by CloudWatch.
+
+### Adding default dimensions
+
+You can use **`SetDefaultDimensions`** method to persist dimensions across Lambda invocations.
+
+=== "SetDefaultDimensions method"
+
+ ```csharp hl_lines="4 5 6 7 12"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+ private Dictionary<string, string> _defaultDimensions = new Dictionary<string, string>{
+ {"Environment", "Prod"},
+ {"Another", "One"}
+ };
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.SetDefaultDimensions(_defaultDimensions);
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+ }
+ ```
+
+### Flushing metrics
+
+With **`MetricsAttribute`** all your metrics are validated, serialized and flushed to standard output when the lambda handler completes execution or when you add the 100th metric to memory.
+
+During metrics validation, if no metrics are provided then a warning will be logged, but no exception will be raised.
+
+=== "Function.cs"
+
+ ```csharp hl_lines="8"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+ }
+ ```
+=== "Example CloudWatch Logs excerpt"
+
+ ```json hl_lines="2 7 10 15 22"
+ {
+ "BookingConfirmation": 1.0,
+ "_aws": {
+ "Timestamp": 1592234975665,
+ "CloudWatchMetrics": [
+ {
+ "Namespace": "ExampleApplication",
+ "Dimensions": [
+ [
+ "service"
+ ]
+ ],
+ "Metrics": [
+ {
+ "Name": "BookingConfirmation",
+ "Unit": "Count"
+ }
+ ]
+ }
+ ]
+ },
+ "service": "ExampleService"
+ }
+ ```
+
+!!! tip "Metric validation"
+ If metrics are provided, and any of the following criteria are not met, **`SchemaValidationException`** will be raised:
+
+ * Maximum of 9 dimensions
+ * Namespace is set
+ * Metric units must be [supported by CloudWatch](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html)
+
+!!! info "We do not emit 0 as a value for ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-dotnet/issues/new?assignees=&labels=feature-request%2Ctriage&template=feature_request.yml&title=Feature+request%3A+TITLE) if you'd prefer a flag to override it"
+
+#### Raising SchemaValidationException on empty metrics
+
+If you want to ensure that at least one metric is emitted, you can pass **`RaiseOnEmptyMetrics`** to the Metrics attribute:
+
+=== "Function.cs"
+
+    ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(RaiseOnEmptyMetrics = true)]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ ```
+
+### Capturing cold start metric
+
+You can optionally capture cold start metrics by setting **`CaptureColdStart`** parameter to `true`.
+
+=== "Function.cs"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(CaptureColdStart = true)]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ ...
+ ```
+
+If it's a cold start invocation, this feature will:
+
+* Create a separate EMF blob solely containing a metric named `ColdStart`
+* Add `function_name` and `service` dimensions
+
+This has the advantage of keeping cold start metric separate from your application metrics, where you might have unrelated dimensions.
+
+## Advanced
+
+### Adding metadata
+
+You can add high-cardinality data as part of your Metrics log with `AddMetadata` method. This is useful when you want to search highly contextual information along with your metrics in your logs.
+
+!!! info
+ **This will not be available during metrics visualization** - Use **dimensions** for this purpose
+
+!!! info
+ Adding metadata with a key that is the same as an existing metric will be ignored
+
+=== "Function.cs"
+
+ ```csharp hl_lines="9"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+        [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ Metrics.AddMetadata("BookingId", "683EEB2D-B2F3-4075-96EE-788E6E2EED45");
+ ...
+ ```
+
+=== "Example CloudWatch Logs excerpt"
+
+ ```json hl_lines="23"
+ {
+ "SuccessfulBooking": 1.0,
+ "_aws": {
+ "Timestamp": 1592234975665,
+ "CloudWatchMetrics": [
+ {
+ "Namespace": "ExampleApplication",
+ "Dimensions": [
+ [
+ "service"
+ ]
+ ],
+ "Metrics": [
+ {
+ "Name": "SuccessfulBooking",
+ "Unit": "Count"
+ }
+ ]
+ }
+ ]
+ },
+ "Service": "Booking",
+ "BookingId": "683EEB2D-B2F3-4075-96EE-788E6E2EED45"
+ }
+ ```
+
+### Single metric with a different dimension
+
+CloudWatch EMF uses the same dimensions across all your metrics. Use **`PushSingleMetric`** if you have a metric that should have different dimensions.
+
+!!! info
+ Generally, this would be an edge case since you [pay for unique metric](https://aws.amazon.com/cloudwatch/pricing). Keep the following formula in mind:
+
+ **unique metric = (metric_name + dimension_name + dimension_value)**
+
+=== "Function.cs"
+
+ ```csharp hl_lines="8-17"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+        [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.PushSingleMetric(
+ metricName: "ColdStart",
+ value: 1,
+ unit: MetricUnit.Count,
+ nameSpace: "ExampleApplication",
+ service: "Booking",
+                defaultDimensions: new Dictionary<string, string>
+ {
+ {"FunctionContext", "$LATEST"}
+ });
+ ...
+ ```
+
+## Testing your code
+
+### Environment variables
+
+???+ tip
+ Ignore this section, if:
+
+    * You are explicitly setting namespace/default dimension via the `Namespace` and `Service` parameters
+ * You're not instantiating `Metrics` in the global namespace
+
+    For example, `[Metrics(Namespace = "ExampleApplication", Service = "Booking")]`
+
+Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests by adding the environment variable.
+
+```csharp title="Injecting Metric Namespace before running tests"
+Environment.SetEnvironmentVariable("POWERTOOLS_METRICS_NAMESPACE","AWSLambdaPowertools");
+```
diff --git a/docs/core/metrics-v2.md b/docs/core/metrics-v2.md
deleted file mode 100644
index c3d216c3e..000000000
--- a/docs/core/metrics-v2.md
+++ /dev/null
@@ -1,1000 +0,0 @@
----
-title: Metrics V2
-description: Core utility
----
-
-Metrics creates custom metrics asynchronously by logging metrics to standard output following [Amazon CloudWatch Embedded Metric Format (EMF)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format.html).
-
-These metrics can be visualized through [Amazon CloudWatch Console](https://aws.amazon.com/cloudwatch/).
-
-## Key features
-
-* Aggregate up to 100 metrics using a single [CloudWatch EMF](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html){target="_blank"} object (large JSON blob)
-* Validating your metrics against common metric definitions mistakes (for example, metric unit, values, max dimensions, max metrics)
-* Metrics are created asynchronously by the CloudWatch service. You do not need any custom stacks, and there is no impact to Lambda function latency
-* Context manager to create a one off metric with a different dimension
-* Ahead-of-Time compilation to native code support [AOT](https://docs.aws.amazon.com/lambda/latest/dg/dotnet-native-aot.html) from version 1.7.0
-* Support for AspNetCore middleware and filters to capture metrics for HTTP requests
-
-## Breaking changes from V1
-
-* **`Dimensions`** outputs as an array of arrays instead of an array of objects. Example: `Dimensions: [["service", "Environment"]]` instead of `Dimensions: ["service", "Environment"]`
-* **`FunctionName`** is not added as default dimension and only to cold start metric.
-* **`Default Dimensions`** can now be included in Cold Start metrics, this is a potential breaking change if you were relying on the absence of default dimensions in Cold Start metrics when searching.
-
-
-
-
-
- Metrics showcase - Metrics Explorer
-
-
-## Installation
-
-Powertools for AWS Lambda (.NET) are available as NuGet packages. You can install the packages from [NuGet Gallery](https://www.nuget.org/packages?q=AWS+Lambda+Powertools*){target="_blank"} or from Visual Studio editor by searching `AWS.Lambda.Powertools*` to see various utilities available.
-
-* [AWS.Lambda.Powertools.Metrics](https://www.nuget.org/packages?q=AWS.Lambda.Powertools.Metrics):
-
- `dotnet add package AWS.Lambda.Powertools.Metrics`
-
-## Terminologies
-
-If you're new to Amazon CloudWatch, there are two terminologies you must be aware of before using this utility:
-
-* **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`.
-* **Dimensions**. Metrics metadata in key-value format. They help you slice and dice metrics visualization, for example `ColdStart` metric by Payment `service`.
-* **Metric**. It's the name of the metric, for example: SuccessfulBooking or UpdatedBooking.
-* **Unit**. It's a value representing the unit of measure for the corresponding metric, for example: Count or Seconds.
-* **Resolution**. It's a value representing the storage resolution for the corresponding metric. Metrics can be either Standard or High resolution. Read more [here](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#Resolution_definition).
-
-Visit the AWS documentation for a complete explanation for [Amazon CloudWatch concepts](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html).
-
-
-
- Metric terminology, visually explained
-
-
-## Getting started
-
-**`Metrics`** is implemented as a Singleton to keep track of your aggregate metrics in memory and make them accessible anywhere in your code. To guarantee that metrics are flushed properly the **`MetricsAttribute`** must be added on the lambda handler.
-
-Metrics has three global settings that will be used across all metrics emitted. Use your application or main service as the metric namespace to easily group all metrics:
-
- Setting | Description | Environment variable | Decorator parameter
--------------------------------|---------------------------------------------------------------------------------| ------------------------------------------------- |-----------------------
- **Metric namespace** | Logical container where all metrics will be placed e.g. `MyCompanyEcommerce` | `POWERTOOLS_METRICS_NAMESPACE` | `Namespace`
- **Service** | Optionally, sets **Service** metric dimension across all metrics e.g. `payment` | `POWERTOOLS_SERVICE_NAME` | `Service`
-**Disable Powertools Metrics** | Optionally, disables all Powertools metrics |`POWERTOOLS_METRICS_DISABLED` | N/A |
-
-???+ info
- `POWERTOOLS_METRICS_DISABLED` will not disable default metrics created by AWS services.
-
-!!! info "Autocomplete Metric Units"
- All parameters in **`Metrics Attribute`** are optional. Following rules apply:
-
- - **Namespace:** **`Empty`** string by default. You can either specify it in code or environment variable. If not present before flushing metrics, a **`SchemaValidationException`** will be thrown.
- - **Service:** **`service_undefined`** by default. You can either specify it in code or environment variable.
- - **CaptureColdStart:** **`false`** by default.
- - **RaiseOnEmptyMetrics:** **`false`** by default.
-
-### Metrics object
-
-#### Attribute
-
-The **`MetricsAttribute`** is a class-level attribute that can be used to set the namespace and service for all metrics emitted by the lambda handler.
-
-```csharp hl_lines="3"
-using AWS.Lambda.Powertools.Metrics;
-
-[Metrics(Namespace = "ExampleApplication", Service = "Booking")]
-public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
-{
- ...
-}
-```
-
-#### Methods
-
-The **`Metrics`** class provides methods to add metrics, dimensions, and metadata to the metrics object.
-
-```csharp hl_lines="5-7"
-using AWS.Lambda.Powertools.Metrics;
-
-public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
-{
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- Metrics.AddDimension("Environment", "Prod");
- Metrics.AddMetadata("BookingId", "683EEB2D-B2F3-4075-96EE-788E6E2EED45");
- ...
-}
-```
-
-#### Initialization
-
-The **`Metrics`** object is initialized as a Singleton and can be accessed anywhere in your code.
-
-But can also be initialize with `Configure` or `Builder` patterns in your Lambda constructor, this the best option for testing.
-
-Configure:
-
-```csharp
-using AWS.Lambda.Powertools.Metrics;
-
-public Function()
-{
- Metrics.Configure(options =>
- {
- options.Namespace = "dotnet-powertools-test";
- options.Service = "testService";
- options.CaptureColdStart = true;
- options.DefaultDimensions = new Dictionary
- {
- { "Environment", "Prod" },
- { "Another", "One" }
- };
- });
-}
-
-[Metrics]
-public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
-{
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
-}
-```
-
-Builder:
-
-```csharp
-using AWS.Lambda.Powertools.Metrics;
-
-private readonly IMetrics _metrics;
-
-public Function()
-{
- _metrics = new MetricsBuilder()
- .WithCaptureColdStart(true)
- .WithService("testService")
- .WithNamespace("dotnet-powertools-test")
- .WithDefaultDimensions(new Dictionary
- {
- { "Environment", "Prod1" },
- { "Another", "One" }
- }).Build();
-}
-
-[Metrics]
-public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
-{
- _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
-}
-```
-
-
-### Creating metrics
-
-You can create metrics using **`AddMetric`**, and you can create dimensions for all your aggregate metrics using **`AddDimension`** method.
-
-=== "Metrics"
-
- ```csharp hl_lines="5 8"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- }
- }
- ```
-=== "Metrics with custom dimensions"
-
- ```csharp hl_lines="8-9"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddDimension("Environment","Prod");
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- }
- }
- ```
-
-!!! tip "Autocomplete Metric Units"
- `MetricUnit` enum facilitates finding a supported metric unit by CloudWatch.
-
-!!! note "Metrics overflow"
- CloudWatch EMF supports a max of 100 metrics per batch. Metrics utility will flush all metrics when adding the 100th metric. Subsequent metrics, e.g. 101th, will be aggregated into a new EMF object, for your convenience.
-
-!!! warning "Metric value must be a positive number"
- Metric values must be a positive number otherwise an `ArgumentException` will be thrown.
-
-!!! warning "Do not create metrics or dimensions outside the handler"
- Metrics or dimensions added in the global scope will only be added during cold start. Disregard if that's the intended behavior.
-
-### Adding high-resolution metrics
-
-You can create [high-resolution metrics](https://aws.amazon.com/about-aws/whats-new/2023/02/amazon-cloudwatch-high-resolution-metric-extraction-structured-logs/) passing `MetricResolution` as parameter to `AddMetric`.
-
-!!! tip "When is it useful?"
- High-resolution metrics are data with a granularity of one second and are very useful in several situations such as telemetry, time series, real-time incident management, and others.
-
-=== "Metrics with high resolution"
-
- ```csharp hl_lines="9 12 15"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- // Publish a metric with standard resolution i.e. StorageResolution = 60
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count, MetricResolution.Standard);
-
- // Publish a metric with high resolution i.e. StorageResolution = 1
- Metrics.AddMetric("FailedBooking", 1, MetricUnit.Count, MetricResolution.High);
-
- // The last parameter (storage resolution) is optional
- Metrics.AddMetric("SuccessfulUpgrade", 1, MetricUnit.Count);
- }
- }
- ```
-
-!!! tip "Autocomplete Metric Resolutions"
- Use the `MetricResolution` enum to easily find a supported metric resolution by CloudWatch.
-
-### Adding default dimensions
-
-You can use **`SetDefaultDimensions`** method to persist dimensions across Lambda invocations.
-
-=== "SetDefaultDimensions method"
-
- ```csharp hl_lines="4 5 6 7 12"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
- private Dictionary _defaultDimensions = new Dictionary{
- {"Environment", "Prod"},
- {"Another", "One"}
- };
-
- [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.SetDefaultDimensions(_defaultDimensions);
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- }
- }
- ```
-
-### Adding default dimensions with cold start metric
-
-You can use the Builder or Configure patterns in your Lambda class constructor to set default dimensions.
-
-=== "Builder pattern"
-
- ```csharp hl_lines="12-16"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
- private readonly IMetrics _metrics;
-
- public Function()
- {
- _metrics = new MetricsBuilder()
- .WithCaptureColdStart(true)
- .WithService("testService")
- .WithNamespace("dotnet-powertools-test")
- .WithDefaultDimensions(new Dictionary
- {
- { "Environment", "Prod1" },
- { "Another", "One" }
- }).Build();
- }
-
- [Metrics]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
- }
- ```
-=== "Configure pattern"
-
- ```csharp hl_lines="12-16"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- public Function()
- {
- Metrics.Configure(options =>
- {
- options.Namespace = "dotnet-powertools-test";
- options.Service = "testService";
- options.CaptureColdStart = true;
- options.DefaultDimensions = new Dictionary
- {
- { "Environment", "Prod" },
- { "Another", "One" }
- };
- });
- }
-
- [Metrics]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
- }
- ```
-### Adding dimensions
-
-You can add dimensions to your metrics using **`AddDimension`** method.
-
-=== "Function.cs"
-
- ```csharp hl_lines="8"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddDimension("Environment","Prod");
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- }
- }
- ```
-=== "Example CloudWatch Logs excerpt"
-
- ```json hl_lines="11 24"
- {
- "SuccessfulBooking": 1.0,
- "_aws": {
- "Timestamp": 1592234975665,
- "CloudWatchMetrics": [
- {
- "Namespace": "ExampleApplication",
- "Dimensions": [
- [
- "service",
- "Environment"
- ]
- ],
- "Metrics": [
- {
- "Name": "SuccessfulBooking",
- "Unit": "Count"
- }
- ]
- }
- ]
- },
- "service": "ExampleService",
- "Environment": "Prod"
- }
- ```
-
-### Flushing metrics
-
-With **`MetricsAttribute`** all your metrics are validated, serialized and flushed to standard output when lambda handler completes execution or when you had the 100th metric to memory.
-
-You can also flush metrics manually by calling **`Flush`** method.
-
-During metrics validation, if no metrics are provided then a warning will be logged, but no exception will be raised.
-
-=== "Function.cs"
-
- ```csharp hl_lines="9"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- Metrics.Flush();
- }
- }
- ```
-=== "Example CloudWatch Logs excerpt"
-
- ```json hl_lines="2 7 10 15 22"
- {
- "BookingConfirmation": 1.0,
- "_aws": {
- "Timestamp": 1592234975665,
- "CloudWatchMetrics": [
- {
- "Namespace": "ExampleApplication",
- "Dimensions": [
- [
- "service"
- ]
- ],
- "Metrics": [
- {
- "Name": "BookingConfirmation",
- "Unit": "Count"
- }
- ]
- }
- ]
- },
- "service": "ExampleService"
- }
- ```
-
-!!! tip "Metric validation"
- If metrics are provided, and any of the following criteria are not met, **`SchemaValidationException`** will be raised:
-
- * Maximum of 30 dimensions
- * Namespace is set
- * Metric units must be [supported by CloudWatch](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html)
-
-!!! info "We do not emit 0 as a value for ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-dotnet/issues/new?assignees=&labels=feature-request%2Ctriage&template=feature_request.yml&title=Feature+request%3A+TITLE) if you'd prefer a flag to override it"
-
-### Raising SchemaValidationException on empty metrics
-
-If you want to ensure that at least one metric is emitted, you can pass **`RaiseOnEmptyMetrics`** to the Metrics attribute:
-
-=== "Function.cs"
-
- ```python hl_lines="5"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(RaiseOnEmptyMetrics = true)]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- ...
- ```
-
-### Capturing cold start metric
-
-You can optionally capture cold start metrics by setting **`CaptureColdStart`** parameter to `true`.
-
-=== "Function.cs"
-
- ```csharp hl_lines="5"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(CaptureColdStart = true)]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- ...
- ```
-=== "Builder pattern"
-
- ```csharp hl_lines="9"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
- private readonly IMetrics _metrics;
-
- public Function()
- {
- _metrics = new MetricsBuilder()
- .WithCaptureColdStart(true)
- .WithService("testService")
- .WithNamespace("dotnet-powertools-test")
- }
-
- [Metrics]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
- }
- ```
-=== "Configure pattern"
-
- ```csharp hl_lines="11"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- public Function()
- {
- Metrics.Configure(options =>
- {
- options.Namespace = "dotnet-powertools-test";
- options.Service = "testService";
- options.CaptureColdStart = true;
- });
- }
-
- [Metrics]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
- }
- ```
-
-If it's a cold start invocation, this feature will:
-
-* Create a separate EMF blob solely containing a metric named `ColdStart`
-* Add `FunctionName` and `Service` dimensions
-
-This has the advantage of keeping cold start metric separate from your application metrics, where you might have unrelated dimensions.
-
-## Advanced
-
-### Adding metadata
-
-You can add high-cardinality data as part of your Metrics log with `AddMetadata` method. This is useful when you want to search highly contextual information along with your metrics in your logs.
-
-!!! info
- **This will not be available during metrics visualization** - Use **dimensions** for this purpose
-
-!!! info
- Adding metadata with a key that is the same as an existing metric will be ignored
-
-=== "Function.cs"
-
- ```csharp hl_lines="9"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = ExampleApplication, Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- Metrics.AddMetadata("BookingId", "683EEB2D-B2F3-4075-96EE-788E6E2EED45");
- ...
- ```
-
-=== "Example CloudWatch Logs excerpt"
-
- ```json hl_lines="23"
- {
- "SuccessfulBooking": 1.0,
- "_aws": {
- "Timestamp": 1592234975665,
- "CloudWatchMetrics": [
- {
- "Namespace": "ExampleApplication",
- "Dimensions": [
- [
- "service"
- ]
- ],
- "Metrics": [
- {
- "Name": "SuccessfulBooking",
- "Unit": "Count"
- }
- ]
- }
- ]
- },
- "Service": "Booking",
- "BookingId": "683EEB2D-B2F3-4075-96EE-788E6E2EED45"
- }
- ```
-
-### Single metric with a different dimension
-
-CloudWatch EMF uses the same dimensions across all your metrics. Use **`PushSingleMetric`** if you have a metric that should have different dimensions.
-
-!!! info
- Generally, this would be an edge case since you [pay for unique metric](https://aws.amazon.com/cloudwatch/pricing). Keep the following formula in mind:
-
- **unique metric = (metric_name + dimension_name + dimension_value)**
-
-=== "Function.cs"
-
- ```csharp hl_lines="8-13"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = ExampleApplication, Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.PushSingleMetric(
- name: "ColdStart",
- value: 1,
- unit: MetricUnit.Count,
- nameSpace: "ExampleApplication",
- service: "Booking");
- ...
- ```
-
-By default it will skip all previously defined dimensions including default dimensions. Use `dimensions` argument if you want to reuse default dimensions or specify custom dimensions from a dictionary.
-
-- `Metrics.DefaultDimensions`: Reuse default dimensions when using static Metrics
-- `Options.DefaultDimensions`: Reuse default dimensions when using Builder or Configure patterns
-
-=== "New Default Dimensions.cs"
-
- ```csharp hl_lines="8-17"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = ExampleApplication, Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.PushSingleMetric(
- name: "ColdStart",
- value: 1,
- unit: MetricUnit.Count,
- nameSpace: "ExampleApplication",
- service: "Booking",
- dimensions: new Dictionary
- {
- {"FunctionContext", "$LATEST"}
- });
- ...
- ```
-=== "Default Dimensions static.cs"
-
- ```csharp hl_lines="8-12"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(Namespace = ExampleApplication, Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.SetDefaultDimensions(new Dictionary
- {
- { "Default", "SingleMetric" }
- });
- Metrics.PushSingleMetric("SingleMetric", 1, MetricUnit.Count, dimensions: Metrics.DefaultDimensions );
- ...
- ```
-=== "Default Dimensions Options / Builder patterns"
-
- ```csharp hl_lines="9-13 18"
- using AWS.Lambda.Powertools.Metrics;
-
- public MetricsnBuilderHandler(IMetrics metrics = null)
- {
- _metrics = metrics ?? new MetricsBuilder()
- .WithCaptureColdStart(true)
- .WithService("testService")
- .WithNamespace("dotnet-powertools-test")
- .WithDefaultDimensions(new Dictionary
- {
- { "Environment", "Prod1" },
- { "Another", "One" }
- }).Build();
- }
-
- public void HandlerSingleMetricDimensions()
- {
- _metrics.PushSingleMetric("SuccessfulBooking", 1, MetricUnit.Count, dimensions: _metrics.Options.DefaultDimensions);
- }
- ...
- ```
-
-### Cold start Function Name dimension
-
-In cases where you want to customize the `FunctionName` dimension in Cold Start metrics.
-
-This is useful where you want to maintain the same name in case of auto generated handler names (cdk, top-level statement functions, etc.)
-
-Example:
-
-=== "In decorator"
-
- ```csharp hl_lines="5"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- [Metrics(FunctionName = "MyFunctionName", Namespace = "ExampleApplication", Service = "Booking")]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
- }
- ```
-=== "Configure / Builder patterns"
-
- ```csharp hl_lines="12"
- using AWS.Lambda.Powertools.Metrics;
-
- public class Function {
-
- public Function()
- {
- Metrics.Configure(options =>
- {
- options.Namespace = "dotnet-powertools-test";
- options.Service = "testService";
- options.CaptureColdStart = true;
- options.FunctionName = "MyFunctionName";
- });
- }
-
- [Metrics]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- ...
- }
- ```
-
-## AspNetCore
-
-### Installation
-
-To use the Metrics middleware in an ASP.NET Core application, you need to install the `AWS.Lambda.Powertools.Metrics.AspNetCore` NuGet package.
-
-```bash
-dotnet add package AWS.Lambda.Powertools.Metrics.AspNetCore
-```
-
-### UseMetrics() Middleware
-
-The `UseMetrics` middleware is an extension method for the `IApplicationBuilder` interface.
-
-It adds a metrics middleware to the specified application builder, which captures cold start metrics (if enabled) and flushes metrics on function exit.
-
-#### Example
-
-```csharp hl_lines="21"
-
-using AWS.Lambda.Powertools.Metrics.AspNetCore.Http;
-
-var builder = WebApplication.CreateBuilder(args);
-
-// Configure metrics
-builder.Services.AddSingleton(_ => new MetricsBuilder()
- .WithNamespace("MyApi") // Namespace for the metrics
- .WithService("WeatherService") // Service name for the metrics
- .WithCaptureColdStart(true) // Capture cold start metrics
- .WithDefaultDimensions(new Dictionary // Default dimensions for the metrics
- {
- {"Environment", "Prod"},
- {"Another", "One"}
- })
- .Build()); // Build the metrics
-
-builder.Services.AddAWSLambdaHosting(LambdaEventSource.RestApi);
-
-var app = builder.Build();
-
-app.UseMetrics(); // Add the metrics middleware
-
-app.MapGet("/powertools", (IMetrics metrics) =>
- {
- // add custom metrics
- metrics.AddMetric("MyCustomMetric", 1, MetricUnit.Count);
- // flush metrics - this is required to ensure metrics are sent to CloudWatch
- metrics.Flush();
- });
-
-app.Run();
-
-```
-
-Here is the highlighted `UseMetrics` method:
-
-```csharp
-///
-/// Adds a metrics middleware to the specified application builder.
-/// This will capture cold start (if CaptureColdStart is enabled) metrics and flush metrics on function exit.
-///
-/// The application builder to add the metrics middleware to.
-/// The application builder with the metrics middleware added.
-public static IApplicationBuilder UseMetrics(this IApplicationBuilder app)
-{
- app.UseMiddleware();
- return app;
-}
-```
-
-Explanation:
-
-- The method is defined as an extension method for the `IApplicationBuilder` interface.
-- It adds a `MetricsMiddleware` to the application builder using the `UseMiddleware` method.
-- The `MetricsMiddleware` captures and records metrics for HTTP requests, including cold start metrics if the `CaptureColdStart` option is enabled.
-
-### WithMetrics() filter
-
-The `WithMetrics` method is an extension method for the `RouteHandlerBuilder` class.
-
-It adds a metrics filter to the specified route handler builder, which captures cold start metrics (if enabled) and flushes metrics on function exit.
-
-#### Example
-
-```csharp hl_lines="31"
-
-using AWS.Lambda.Powertools.Metrics;
-using AWS.Lambda.Powertools.Metrics.AspNetCore.Http;
-
-var builder = WebApplication.CreateBuilder(args);
-
-// Configure metrics
-builder.Services.AddSingleton(_ => new MetricsBuilder()
- .WithNamespace("MyApi") // Namespace for the metrics
- .WithService("WeatherService") // Service name for the metrics
- .WithCaptureColdStart(true) // Capture cold start metrics
- .WithDefaultDimensions(new Dictionary // Default dimensions for the metrics
- {
- {"Environment", "Prod"},
- {"Another", "One"}
- })
- .Build()); // Build the metrics
-
-// Add AWS Lambda support. When the application is run in Lambda, Kestrel is swapped out as the web server with Amazon.Lambda.AspNetCoreServer. This
-// package will act as the web server translating requests and responses between the Lambda event source and ASP.NET Core.
-builder.Services.AddAWSLambdaHosting(LambdaEventSource.RestApi);
-
-var app = builder.Build();
-
-app.MapGet("/powertools", (IMetrics metrics) =>
- {
- // add custom metrics
- metrics.AddMetric("MyCustomMetric", 1, MetricUnit.Count);
- // flush metrics - this is required to ensure metrics are sent to CloudWatch
- metrics.Flush();
- })
- .WithMetrics();
-
-app.Run();
-
-```
-
-Here is the highlighted `WithMetrics` method:
-
-```csharp
-///
-/// Adds a metrics filter to the specified route handler builder.
-/// This will capture cold start (if CaptureColdStart is enabled) metrics and flush metrics on function exit.
-///
-/// The route handler builder to add the metrics filter to.
-/// The route handler builder with the metrics filter added.
-public static RouteHandlerBuilder WithMetrics(this RouteHandlerBuilder builder)
-{
- builder.AddEndpointFilter();
- return builder;
-}
-```
-
-Explanation:
-
-- The method is defined as an extension method for the `RouteHandlerBuilder` class.
-- It adds a `MetricsFilter` to the route handler builder using the `AddEndpointFilter` method.
-- The `MetricsFilter` captures and records metrics for HTTP endpoints, including cold start metrics if the `CaptureColdStart` option is enabled.
-- The method returns the modified `RouteHandlerBuilder` instance with the metrics filter added.
-
-
-## Testing your code
-
-### Unit testing
-
-To test your code that uses the Metrics utility, you can use the `TestLambdaContext` class from the `Amazon.Lambda.TestUtilities` package.
-
-You can also use the `IMetrics` interface to mock the Metrics utility in your tests.
-
-Here is an example of how you can test a Lambda function that uses the Metrics utility:
-
-#### Lambda Function
-
-```csharp
-using System.Collections.Generic;
-using Amazon.Lambda.Core;
-
-public class MetricsnBuilderHandler
-{
- private readonly IMetrics _metrics;
-
- // Allow injection of IMetrics for testing
- public MetricsnBuilderHandler(IMetrics metrics = null)
- {
- _metrics = metrics ?? new MetricsBuilder()
- .WithCaptureColdStart(true)
- .WithService("testService")
- .WithNamespace("dotnet-powertools-test")
- .WithDefaultDimensions(new Dictionary
- {
- { "Environment", "Prod1" },
- { "Another", "One" }
- }).Build();
- }
-
- [Metrics]
- public void Handler(ILambdaContext context)
- {
- _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- }
-}
-
-```
-#### Unit Tests
-
-
-```csharp
-[Fact]
- public void Handler_With_Builder_Should_Configure_In_Constructor()
- {
- // Arrange
- var handler = new MetricsnBuilderHandler();
-
- // Act
- handler.Handler(new TestLambdaContext
- {
- FunctionName = "My_Function_Name"
- });
-
- // Get the output and parse it
- var metricsOutput = _consoleOut.ToString();
-
- // Assert cold start
- Assert.Contains(
- "\"CloudWatchMetrics\":[{\"Namespace\":\"dotnet-powertools-test\",\"Metrics\":[{\"Name\":\"ColdStart\",\"Unit\":\"Count\"}],\"Dimensions\":[[\"Service\",\"Environment\",\"Another\",\"FunctionName\"]]}]},\"Service\":\"testService\",\"Environment\":\"Prod1\",\"Another\":\"One\",\"FunctionName\":\"My_Function_Name\",\"ColdStart\":1}",
- metricsOutput);
- // Assert successful Memory metrics
- Assert.Contains(
- "\"CloudWatchMetrics\":[{\"Namespace\":\"dotnet-powertools-test\",\"Metrics\":[{\"Name\":\"SuccessfulBooking\",\"Unit\":\"Count\"}],\"Dimensions\":[[\"Service\",\"Environment\",\"Another\",\"FunctionName\"]]}]},\"Service\":\"testService\",\"Environment\":\"Prod1\",\"Another\":\"One\",\"FunctionName\":\"My_Function_Name\",\"SuccessfulBooking\":1}",
- metricsOutput);
- }
-
- [Fact]
- public void Handler_With_Builder_Should_Configure_In_Constructor_Mock()
- {
- var metricsMock = Substitute.For();
-
- metricsMock.Options.Returns(new MetricsOptions
- {
- CaptureColdStart = true,
- Namespace = "dotnet-powertools-test",
- Service = "testService",
- DefaultDimensions = new Dictionary
- {
- { "Environment", "Prod" },
- { "Another", "One" }
- }
- });
-
- Metrics.UseMetricsForTests(metricsMock);
-
- var sut = new MetricsnBuilderHandler(metricsMock);
-
- // Act
- sut.Handler(new TestLambdaContext
- {
- FunctionName = "My_Function_Name"
- });
-
- metricsMock.Received(1).PushSingleMetric("ColdStart", 1, MetricUnit.Count, "dotnet-powertools-test",
- service: "testService", Arg.Any>());
- metricsMock.Received(1).AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
- }
-```
-
-### Environment variables
-
-???+ tip
- Ignore this section, if:
-
- * You are explicitly setting namespace/default dimension via `namespace` and `service` parameters
- * You're not instantiating `Metrics` in the global namespace
-
- For example, `Metrics(namespace="ExampleApplication", service="booking")`
-
-Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests by adding the environment variable.
-
-```csharp title="Injecting Metric Namespace before running tests"
-Environment.SetEnvironmentVariable("POWERTOOLS_METRICS_NAMESPACE","AWSLambdaPowertools");
-```
diff --git a/docs/core/metrics.md b/docs/core/metrics.md
index 03f7d6fa8..e941600c4 100644
--- a/docs/core/metrics.md
+++ b/docs/core/metrics.md
@@ -14,6 +14,17 @@ These metrics can be visualized through [Amazon CloudWatch Console](https://aws.
* Metrics are created asynchronously by the CloudWatch service. You do not need any custom stacks, and there is no impact to Lambda function latency
* Context manager to create a one off metric with a different dimension
* Ahead-of-Time compilation to native code support [AOT](https://docs.aws.amazon.com/lambda/latest/dg/dotnet-native-aot.html) from version 1.7.0
+* Support for AspNetCore middleware and filters to capture metrics for HTTP requests
+
+## Breaking changes from V1
+
+!!! info
+
+ Looking for v1 specific documentation? Please go to [Metrics v1](/lambda/dotnet/core/metrics-v1)
+
+* **`Dimensions`** outputs as an array of arrays instead of an array of objects. Example: `Dimensions: [["service", "Environment"]]` instead of `Dimensions: ["service", "Environment"]`
+* **`FunctionName`** is no longer added as a default dimension; it is only added to the cold start metric.
+* **`Default Dimensions`** can now be included in Cold Start metrics, this is a potential breaking change if you were relying on the absence of default dimensions in Cold Start metrics when searching.
@@ -28,7 +39,7 @@ Powertools for AWS Lambda (.NET) are available as NuGet packages. You can instal
* [AWS.Lambda.Powertools.Metrics](https://www.nuget.org/packages?q=AWS.Lambda.Powertools.Metrics):
- `dotnet add package AWS.Lambda.Powertools.Metrics -v 1.7.1`
+ `dotnet add package AWS.Lambda.Powertools.Metrics`
## Terminologies
@@ -51,12 +62,16 @@ Visit the AWS documentation for a complete explanation for [Amazon CloudWatch co
**`Metrics`** is implemented as a Singleton to keep track of your aggregate metrics in memory and make them accessible anywhere in your code. To guarantee that metrics are flushed properly the **`MetricsAttribute`** must be added on the lambda handler.
-Metrics has two global settings that will be used across all metrics emitted. Use your application or main service as the metric namespace to easily group all metrics:
+Metrics has three global settings that will be used across all metrics emitted. Use your application or main service as the metric namespace to easily group all metrics:
-Setting | Description | Environment variable | Constructor parameter
-------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | -------------------------------------------------
-**Service** | Optionally, sets **service** metric dimension across all metrics e.g. `payment` | `POWERTOOLS_SERVICE_NAME` | `Service`
-**Metric namespace** | Logical container where all metrics will be placed e.g. `MyCompanyEcommerce` | `POWERTOOLS_METRICS_NAMESPACE` | `Namespace`
+ Setting | Description | Environment variable | Decorator parameter
+-------------------------------|---------------------------------------------------------------------------------| ------------------------------------------------- |-----------------------
+ **Metric namespace** | Logical container where all metrics will be placed e.g. `MyCompanyEcommerce` | `POWERTOOLS_METRICS_NAMESPACE` | `Namespace`
+ **Service** | Optionally, sets **Service** metric dimension across all metrics e.g. `payment` | `POWERTOOLS_SERVICE_NAME` | `Service`
+**Disable Powertools Metrics** | Optionally, disables all Powertools metrics |`POWERTOOLS_METRICS_DISABLED` | N/A |
+
+???+ info
+ `POWERTOOLS_METRICS_DISABLED` will not disable default metrics created by AWS services.
!!! info "Autocomplete Metric Units"
All parameters in **`Metrics Attribute`** are optional. Following rules apply:
@@ -66,42 +81,100 @@ Setting | Description | Environment variable | Constructor parameter
- **CaptureColdStart:** **`false`** by default.
- **RaiseOnEmptyMetrics:** **`false`** by default.
-### Example using AWS Serverless Application Model (AWS SAM)
+### Metrics object
-=== "template.yml"
+#### Attribute
- ```yaml hl_lines="9 10"
- Resources:
- HelloWorldFunction:
- Type: AWS::Serverless::Function
- Properties:
- ...
- Environment:
- Variables:
- POWERTOOLS_SERVICE_NAME: ShoppingCartService
- POWERTOOLS_METRICS_NAMESPACE: MyCompanyEcommerce
- ```
+The **`MetricsAttribute`** is a class-level attribute that can be used to set the namespace and service for all metrics emitted by the lambda handler.
-=== "Function.cs"
+```csharp hl_lines="3"
+using AWS.Lambda.Powertools.Metrics;
+
+[Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+{
+ ...
+}
+```
- ```csharp hl_lines="4"
- using AWS.Lambda.Powertools.Metrics;
+#### Methods
- public class Function {
- [Metrics(Namespace = "MyCompanyEcommerce", Service = "ShoppingCartService", CaptureColdStart = true, RaiseOnEmptyMetrics = true)]
- public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
- {
- ...
- }
- }
- ```
+The **`Metrics`** class provides methods to add metrics, dimensions, and metadata to the metrics object.
+
+```csharp hl_lines="5-7"
+using AWS.Lambda.Powertools.Metrics;
+
+public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+{
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ Metrics.AddDimension("Environment", "Prod");
+ Metrics.AddMetadata("BookingId", "683EEB2D-B2F3-4075-96EE-788E6E2EED45");
+ ...
+}
+```
+
+#### Initialization
-### Full list of environment variables
+The **`Metrics`** object is initialized as a Singleton and can be accessed anywhere in your code.
+
+It can also be initialized with the `Configure` or `Builder` patterns in your Lambda constructor; this is the best option for testing.
+
+Configure:
+
+```csharp
+using AWS.Lambda.Powertools.Metrics;
+
+public Function()
+{
+ Metrics.Configure(options =>
+ {
+ options.Namespace = "dotnet-powertools-test";
+ options.Service = "testService";
+ options.CaptureColdStart = true;
+ options.DefaultDimensions = new Dictionary
+ {
+ { "Environment", "Prod" },
+ { "Another", "One" }
+ };
+ });
+}
+
+[Metrics]
+public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+{
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+}
+```
+
+Builder:
+
+```csharp
+using AWS.Lambda.Powertools.Metrics;
+
+private readonly IMetrics _metrics;
+
+public Function()
+{
+ _metrics = new MetricsBuilder()
+ .WithCaptureColdStart(true)
+ .WithService("testService")
+ .WithNamespace("dotnet-powertools-test")
+ .WithDefaultDimensions(new Dictionary
+ {
+ { "Environment", "Prod1" },
+ { "Another", "One" }
+ }).Build();
+}
+
+[Metrics]
+public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+{
+ _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+}
+```
-| Environment variable | Description | Default |
-| ------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- |
-| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | `"service_undefined"` |
-| **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | `None` |
### Creating metrics
@@ -205,15 +278,127 @@ You can use **`SetDefaultDimensions`** method to persist dimensions across Lambd
}
```
+### Adding default dimensions with cold start metric
+
+You can use the Builder or Configure patterns in your Lambda class constructor to set default dimensions.
+
+=== "Builder pattern"
+
+ ```csharp hl_lines="12-16"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+ private readonly IMetrics _metrics;
+
+ public Function()
+ {
+ _metrics = new MetricsBuilder()
+ .WithCaptureColdStart(true)
+ .WithService("testService")
+ .WithNamespace("dotnet-powertools-test")
+ .WithDefaultDimensions(new Dictionary
+ {
+ { "Environment", "Prod1" },
+ { "Another", "One" }
+ }).Build();
+ }
+
+ [Metrics]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+ }
+ ```
+=== "Configure pattern"
+
+ ```csharp hl_lines="12-16"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ public Function()
+ {
+ Metrics.Configure(options =>
+ {
+ options.Namespace = "dotnet-powertools-test";
+ options.Service = "testService";
+ options.CaptureColdStart = true;
+ options.DefaultDimensions = new Dictionary
+ {
+ { "Environment", "Prod" },
+ { "Another", "One" }
+ };
+ });
+ }
+
+ [Metrics]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+ }
+ ```
+### Adding dimensions
+
+You can add dimensions to your metrics using **`AddDimension`** method.
+
+=== "Function.cs"
+
+ ```csharp hl_lines="8"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddDimension("Environment","Prod");
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+ }
+ ```
+=== "Example CloudWatch Logs excerpt"
+
+ ```json hl_lines="11 24"
+ {
+ "SuccessfulBooking": 1.0,
+ "_aws": {
+ "Timestamp": 1592234975665,
+ "CloudWatchMetrics": [
+ {
+ "Namespace": "ExampleApplication",
+ "Dimensions": [
+ [
+ "service",
+ "Environment"
+ ]
+ ],
+ "Metrics": [
+ {
+ "Name": "SuccessfulBooking",
+ "Unit": "Count"
+ }
+ ]
+ }
+ ]
+ },
+ "service": "ExampleService",
+ "Environment": "Prod"
+ }
+ ```
+
### Flushing metrics
With **`MetricsAttribute`** all your metrics are validated, serialized and flushed to standard output when lambda handler completes execution or when you had the 100th metric to memory.
+You can also flush metrics manually by calling **`Flush`** method.
+
During metrics validation, if no metrics are provided then a warning will be logged, but no exception will be raised.
=== "Function.cs"
- ```csharp hl_lines="8"
+ ```csharp hl_lines="9"
using AWS.Lambda.Powertools.Metrics;
public class Function {
@@ -222,6 +407,7 @@ During metrics validation, if no metrics are provided then a warning will be log
public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
{
Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ Metrics.Flush();
}
}
```
@@ -256,13 +442,13 @@ During metrics validation, if no metrics are provided then a warning will be log
!!! tip "Metric validation"
If metrics are provided, and any of the following criteria are not met, **`SchemaValidationException`** will be raised:
- * Maximum of 9 dimensions
+ * Maximum of 30 dimensions
* Namespace is set
* Metric units must be [supported by CloudWatch](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html)
!!! info "We do not emit 0 as a value for ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-dotnet/issues/new?assignees=&labels=feature-request%2Ctriage&template=feature_request.yml&title=Feature+request%3A+TITLE) if you'd prefer a flag to override it"
-#### Raising SchemaValidationException on empty metrics
+### Raising SchemaValidationException on empty metrics
If you want to ensure that at least one metric is emitted, you can pass **`RaiseOnEmptyMetrics`** to the Metrics attribute:
@@ -295,11 +481,58 @@ You can optionally capture cold start metrics by setting **`CaptureColdStart`**
{
...
```
+=== "Builder pattern"
+
+ ```csharp hl_lines="9"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+ private readonly IMetrics _metrics;
+
+ public Function()
+ {
+ _metrics = new MetricsBuilder()
+ .WithCaptureColdStart(true)
+ .WithService("testService")
+ .WithNamespace("dotnet-powertools-test").Build();
+ }
+
+ [Metrics]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+ }
+ ```
+=== "Configure pattern"
+
+ ```csharp hl_lines="11"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ public Function()
+ {
+ Metrics.Configure(options =>
+ {
+ options.Namespace = "dotnet-powertools-test";
+ options.Service = "testService";
+ options.CaptureColdStart = true;
+ });
+ }
+
+ [Metrics]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+ }
+ ```
If it's a cold start invocation, this feature will:
* Create a separate EMF blob solely containing a metric named `ColdStart`
-* Add `function_name` and `service` dimensions
+* Add `FunctionName` and `Service` dimensions
This has the advantage of keeping cold start metric separate from your application metrics, where you might have unrelated dimensions.
@@ -370,6 +603,30 @@ CloudWatch EMF uses the same dimensions across all your metrics. Use **`PushSing
=== "Function.cs"
+ ```csharp hl_lines="8-13"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.PushSingleMetric(
+ name: "ColdStart",
+ value: 1,
+ unit: MetricUnit.Count,
+ nameSpace: "ExampleApplication",
+ service: "Booking");
+ ...
+ ```
+
+By default it will skip all previously defined dimensions including default dimensions. Use `dimensions` argument if you want to reuse default dimensions or specify custom dimensions from a dictionary.
+
+- `Metrics.DefaultDimensions`: Reuse default dimensions when using static Metrics
+- `Options.DefaultDimensions`: Reuse default dimensions when using Builder or Configure patterns
+
+=== "New Default Dimensions.cs"
+
```csharp hl_lines="8-17"
using AWS.Lambda.Powertools.Metrics;
@@ -379,20 +636,357 @@ CloudWatch EMF uses the same dimensions across all your metrics. Use **`PushSing
public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
{
Metrics.PushSingleMetric(
- metricName: "ColdStart",
+ name: "ColdStart",
value: 1,
unit: MetricUnit.Count,
nameSpace: "ExampleApplication",
service: "Booking",
- defaultDimensions: new Dictionary
+ dimensions: new Dictionary
{
{"FunctionContext", "$LATEST"}
});
...
```
+=== "Default Dimensions static.cs"
+
+ ```csharp hl_lines="8-12"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.SetDefaultDimensions(new Dictionary
+ {
+ { "Default", "SingleMetric" }
+ });
+ Metrics.PushSingleMetric("SingleMetric", 1, MetricUnit.Count, dimensions: Metrics.DefaultDimensions );
+ ...
+ ```
+=== "Default Dimensions Options / Builder patterns"
+
+ ```csharp hl_lines="9-13 18"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public MetricsnBuilderHandler(IMetrics metrics = null)
+ {
+ _metrics = metrics ?? new MetricsBuilder()
+ .WithCaptureColdStart(true)
+ .WithService("testService")
+ .WithNamespace("dotnet-powertools-test")
+ .WithDefaultDimensions(new Dictionary
+ {
+ { "Environment", "Prod1" },
+ { "Another", "One" }
+ }).Build();
+ }
+
+ public void HandlerSingleMetricDimensions()
+ {
+ _metrics.PushSingleMetric("SuccessfulBooking", 1, MetricUnit.Count, dimensions: _metrics.Options.DefaultDimensions);
+ }
+ ...
+ ```
+
+### Cold start Function Name dimension
+
+There are cases where you may want to customize the `FunctionName` dimension in Cold Start metrics.
+
+This is useful when you want to maintain the same name in the case of auto-generated handler names (CDK, top-level statement functions, etc.).
+
+Example:
+
+=== "In decorator"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ [Metrics(FunctionName = "MyFunctionName", Namespace = "ExampleApplication", Service = "Booking")]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+ }
+ ```
+=== "Configure / Builder patterns"
+
+ ```csharp hl_lines="12"
+ using AWS.Lambda.Powertools.Metrics;
+
+ public class Function {
+
+ public Function()
+ {
+ Metrics.Configure(options =>
+ {
+ options.Namespace = "dotnet-powertools-test";
+ options.Service = "testService";
+ options.CaptureColdStart = true;
+ options.FunctionName = "MyFunctionName";
+ });
+ }
+
+ [Metrics]
+ public async Task FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
+ {
+ Metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ ...
+ }
+ ```
+
+## AspNetCore
+
+### Installation
+
+To use the Metrics middleware in an ASP.NET Core application, you need to install the `AWS.Lambda.Powertools.Metrics.AspNetCore` NuGet package.
+
+```bash
+dotnet add package AWS.Lambda.Powertools.Metrics.AspNetCore
+```
+
+### UseMetrics() Middleware
+
+The `UseMetrics` middleware is an extension method for the `IApplicationBuilder` interface.
+
+It adds a metrics middleware to the specified application builder, which captures cold start metrics (if enabled) and flushes metrics on function exit.
+
+#### Example
+
+```csharp hl_lines="21"
+
+using AWS.Lambda.Powertools.Metrics.AspNetCore.Http;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Configure metrics
+builder.Services.AddSingleton(_ => new MetricsBuilder()
+ .WithNamespace("MyApi") // Namespace for the metrics
+ .WithService("WeatherService") // Service name for the metrics
+ .WithCaptureColdStart(true) // Capture cold start metrics
+ .WithDefaultDimensions(new Dictionary // Default dimensions for the metrics
+ {
+ {"Environment", "Prod"},
+ {"Another", "One"}
+ })
+ .Build()); // Build the metrics
+
+builder.Services.AddAWSLambdaHosting(LambdaEventSource.RestApi);
+
+var app = builder.Build();
+
+app.UseMetrics(); // Add the metrics middleware
+
+app.MapGet("/powertools", (IMetrics metrics) =>
+ {
+ // add custom metrics
+ metrics.AddMetric("MyCustomMetric", 1, MetricUnit.Count);
+ // flush metrics - this is required to ensure metrics are sent to CloudWatch
+ metrics.Flush();
+ });
+
+app.Run();
+
+```
+
+Here is the highlighted `UseMetrics` method:
+
+```csharp
+///
+/// Adds a metrics middleware to the specified application builder.
+/// This will capture cold start (if CaptureColdStart is enabled) metrics and flush metrics on function exit.
+///
+/// The application builder to add the metrics middleware to.
+/// The application builder with the metrics middleware added.
+public static IApplicationBuilder UseMetrics(this IApplicationBuilder app)
+{
+ app.UseMiddleware();
+ return app;
+}
+```
+
+Explanation:
+
+- The method is defined as an extension method for the `IApplicationBuilder` interface.
+- It adds a `MetricsMiddleware` to the application builder using the `UseMiddleware` method.
+- The `MetricsMiddleware` captures and records metrics for HTTP requests, including cold start metrics if the `CaptureColdStart` option is enabled.
+
+### WithMetrics() filter
+
+The `WithMetrics` method is an extension method for the `RouteHandlerBuilder` class.
+
+It adds a metrics filter to the specified route handler builder, which captures cold start metrics (if enabled) and flushes metrics on function exit.
+
+#### Example
+
+```csharp hl_lines="31"
+
+using AWS.Lambda.Powertools.Metrics;
+using AWS.Lambda.Powertools.Metrics.AspNetCore.Http;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Configure metrics
+builder.Services.AddSingleton(_ => new MetricsBuilder()
+ .WithNamespace("MyApi") // Namespace for the metrics
+ .WithService("WeatherService") // Service name for the metrics
+ .WithCaptureColdStart(true) // Capture cold start metrics
+ .WithDefaultDimensions(new Dictionary // Default dimensions for the metrics
+ {
+ {"Environment", "Prod"},
+ {"Another", "One"}
+ })
+ .Build()); // Build the metrics
+
+// Add AWS Lambda support. When the application is run in Lambda, Kestrel is swapped out as the web server with Amazon.Lambda.AspNetCoreServer. This
+// package will act as the web server translating requests and responses between the Lambda event source and ASP.NET Core.
+builder.Services.AddAWSLambdaHosting(LambdaEventSource.RestApi);
+
+var app = builder.Build();
+
+app.MapGet("/powertools", (IMetrics metrics) =>
+ {
+ // add custom metrics
+ metrics.AddMetric("MyCustomMetric", 1, MetricUnit.Count);
+ // flush metrics - this is required to ensure metrics are sent to CloudWatch
+ metrics.Flush();
+ })
+ .WithMetrics();
+
+app.Run();
+
+```
+
+Here is the highlighted `WithMetrics` method:
+
+```csharp
+///
+/// Adds a metrics filter to the specified route handler builder.
+/// This will capture cold start (if CaptureColdStart is enabled) metrics and flush metrics on function exit.
+///
+/// The route handler builder to add the metrics filter to.
+/// The route handler builder with the metrics filter added.
+public static RouteHandlerBuilder WithMetrics(this RouteHandlerBuilder builder)
+{
+ builder.AddEndpointFilter();
+ return builder;
+}
+```
+
+Explanation:
+
+- The method is defined as an extension method for the `RouteHandlerBuilder` class.
+- It adds a `MetricsFilter` to the route handler builder using the `AddEndpointFilter` method.
+- The `MetricsFilter` captures and records metrics for HTTP endpoints, including cold start metrics if the `CaptureColdStart` option is enabled.
+- The method returns the modified `RouteHandlerBuilder` instance with the metrics filter added.
+
## Testing your code
+### Unit testing
+
+To test your code that uses the Metrics utility, you can use the `TestLambdaContext` class from the `Amazon.Lambda.TestUtilities` package.
+
+You can also use the `IMetrics` interface to mock the Metrics utility in your tests.
+
+Here is an example of how you can test a Lambda function that uses the Metrics utility:
+
+#### Lambda Function
+
+```csharp
+using System.Collections.Generic;
+using Amazon.Lambda.Core;
+
+public class MetricsnBuilderHandler
+{
+ private readonly IMetrics _metrics;
+
+ // Allow injection of IMetrics for testing
+ public MetricsnBuilderHandler(IMetrics metrics = null)
+ {
+ _metrics = metrics ?? new MetricsBuilder()
+ .WithCaptureColdStart(true)
+ .WithService("testService")
+ .WithNamespace("dotnet-powertools-test")
+ .WithDefaultDimensions(new Dictionary
+ {
+ { "Environment", "Prod1" },
+ { "Another", "One" }
+ }).Build();
+ }
+
+ [Metrics]
+ public void Handler(ILambdaContext context)
+ {
+ _metrics.AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+}
+
+```
+#### Unit Tests
+
+
+```csharp
+[Fact]
+ public void Handler_With_Builder_Should_Configure_In_Constructor()
+ {
+ // Arrange
+ var handler = new MetricsnBuilderHandler();
+
+ // Act
+ handler.Handler(new TestLambdaContext
+ {
+ FunctionName = "My_Function_Name"
+ });
+
+ // Get the output and parse it
+ var metricsOutput = _consoleOut.ToString();
+
+ // Assert cold start
+ Assert.Contains(
+ "\"CloudWatchMetrics\":[{\"Namespace\":\"dotnet-powertools-test\",\"Metrics\":[{\"Name\":\"ColdStart\",\"Unit\":\"Count\"}],\"Dimensions\":[[\"Service\",\"Environment\",\"Another\",\"FunctionName\"]]}]},\"Service\":\"testService\",\"Environment\":\"Prod1\",\"Another\":\"One\",\"FunctionName\":\"My_Function_Name\",\"ColdStart\":1}",
+ metricsOutput);
+ // Assert successful booking metric
+ Assert.Contains(
+ "\"CloudWatchMetrics\":[{\"Namespace\":\"dotnet-powertools-test\",\"Metrics\":[{\"Name\":\"SuccessfulBooking\",\"Unit\":\"Count\"}],\"Dimensions\":[[\"Service\",\"Environment\",\"Another\",\"FunctionName\"]]}]},\"Service\":\"testService\",\"Environment\":\"Prod1\",\"Another\":\"One\",\"FunctionName\":\"My_Function_Name\",\"SuccessfulBooking\":1}",
+ metricsOutput);
+ }
+
+ [Fact]
+ public void Handler_With_Builder_Should_Configure_In_Constructor_Mock()
+ {
+ var metricsMock = Substitute.For();
+
+ metricsMock.Options.Returns(new MetricsOptions
+ {
+ CaptureColdStart = true,
+ Namespace = "dotnet-powertools-test",
+ Service = "testService",
+ DefaultDimensions = new Dictionary
+ {
+ { "Environment", "Prod" },
+ { "Another", "One" }
+ }
+ });
+
+ Metrics.UseMetricsForTests(metricsMock);
+
+ var sut = new MetricsnBuilderHandler(metricsMock);
+
+ // Act
+ sut.Handler(new TestLambdaContext
+ {
+ FunctionName = "My_Function_Name"
+ });
+
+ metricsMock.Received(1).PushSingleMetric("ColdStart", 1, MetricUnit.Count, "dotnet-powertools-test",
+ service: "testService", Arg.Any>());
+ metricsMock.Received(1).AddMetric("SuccessfulBooking", 1, MetricUnit.Count);
+ }
+```
+
### Environment variables
???+ tip
diff --git a/docs/getting-started/idempotency/aot.md b/docs/getting-started/idempotency/aot.md
new file mode 100644
index 000000000..e69de29bb
diff --git a/docs/getting-started/idempotency/simple.md b/docs/getting-started/idempotency/simple.md
new file mode 100644
index 000000000..51536a470
--- /dev/null
+++ b/docs/getting-started/idempotency/simple.md
@@ -0,0 +1,4 @@
+---
+title: Simple Logging
+description: Getting started with Logging
+---
\ No newline at end of file
diff --git a/docs/getting-started/logger/aot.md b/docs/getting-started/logger/aot.md
new file mode 100644
index 000000000..f42610893
--- /dev/null
+++ b/docs/getting-started/logger/aot.md
@@ -0,0 +1,426 @@
+---
+title: Native AOT with Logger
+description: Getting started with Logging in Native AOT applications
+---
+
+# Getting Started with AWS Lambda Powertools for .NET Logger in Native AOT
+
+This tutorial shows you how to set up an AWS Lambda project using Native AOT compilation with Powertools for .NET
+Logger, addressing performance, trimming, and deployment considerations.
+
+## Prerequisites
+
+- An AWS account with appropriate permissions
+- A code editor (we'll use Visual Studio Code in this tutorial)
+- .NET 8 SDK or later
+- Docker (required for cross-platform AOT compilation)
+
+## 1. Understanding Native AOT
+
+Native AOT (Ahead-of-Time) compilation converts your .NET application directly to native code during build time rather
+than compiling to IL (Intermediate Language) code that gets JIT-compiled at runtime. Benefits for AWS Lambda include:
+
+- Faster cold start times (typically 50-70% reduction)
+- Lower memory footprint
+- No runtime JIT compilation overhead
+- No need for the full .NET runtime to be packaged with your Lambda
+
+## 2. Installing Required Tools
+
+First, ensure you have the .NET 8 SDK installed:
+
+```bash
+dotnet --version
+```
+
+Install the AWS Lambda .NET CLI tools:
+
+```bash
+dotnet tool install -g Amazon.Lambda.Tools
+dotnet new install Amazon.Lambda.Templates
+```
+
+Verify installation:
+
+```bash
+dotnet lambda --help
+```
+
+## 3. Creating a Native AOT Lambda Project
+
+Create a directory for your project:
+
+```bash
+mkdir powertools-aot-logger-demo
+cd powertools-aot-logger-demo
+```
+
+Create a new Lambda project using the Native AOT template:
+
+```bash
+dotnet new lambda.NativeAOT -n PowertoolsAotLoggerDemo
+cd PowertoolsAotLoggerDemo
+```
+
+## 4. Adding the Powertools Logger Package
+
+Add the AWS.Lambda.Powertools.Logging package:
+
+```bash
+cd src/PowertoolsAotLoggerDemo
+dotnet add package AWS.Lambda.Powertools.Logging
+```
+
+## 5. Implementing the Lambda Function with AOT-compatible Logger
+
+Let's modify the Function.cs file to implement our function with Powertools Logger in an AOT-compatible way:
+
+```csharp
+using Amazon.Lambda.Core;
+using Amazon.Lambda.RuntimeSupport;
+using Amazon.Lambda.Serialization.SystemTextJson;
+using System.Text.Json.Serialization;
+using System.Text.Json;
+using AWS.Lambda.Powertools.Logging;
+using Microsoft.Extensions.Logging;
+
+
+namespace PowertoolsAotLoggerDemo;
+
+public class Function
+{
+ private static ILogger _logger;
+
+ private static async Task Main()
+ {
+ _logger = LoggerFactory.Create(builder =>
+ {
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "TestService";
+ config.LoggerOutputCase = LoggerOutputCase.PascalCase;
+ config.JsonOptions = new JsonSerializerOptions
+ {
+ TypeInfoResolver = LambdaFunctionJsonSerializerContext.Default
+ };
+ });
+ }).CreatePowertoolsLogger();
+
+ Func<string, ILambdaContext, string> handler = FunctionHandler;
+ await LambdaBootstrapBuilder.Create(handler, new SourceGeneratorLambdaJsonSerializer<LambdaFunctionJsonSerializerContext>())
+ .Build()
+ .RunAsync();
+ }
+
+ public static string FunctionHandler(string input, ILambdaContext context)
+ {
+ _logger.LogInformation("Processing input: {Input}", input);
+ _logger.LogInformation("Processing context: {@Context}", context);
+
+ return input.ToUpper();
+ }
+}
+
+
+[JsonSerializable(typeof(string))]
+[JsonSerializable(typeof(ILambdaContext))] // make sure to include ILambdaContext for serialization
+public partial class LambdaFunctionJsonSerializerContext : JsonSerializerContext
+{
+}
+```
+
+## 6. Updating the Project File for AOT Compatibility
+
+```xml
+
+
+
+ net8.0
+ enable
+ enable
+
+
+ true
+
+
+ true
+
+
+ full
+
+
+ 0
+
+
+
+
+
+ true
+ Size
+ true
+
+
+ true
+ Lambda
+
+
+ Exe
+
+
+ false
+ Size
+
+
+
+
+
+
+
+
+
+```
+
+## 8. Cross-Platform Deployment Considerations
+
+Native AOT compilation must target the same OS and architecture as the deployment environment. AWS Lambda runs on Amazon
+Linux 2023 (AL2023) with x64 architecture.
+
+### Building for AL2023 on Different Platforms
+
+#### Option A: Using the AWS Lambda .NET Tool with Docker
+
+The simplest approach is to use the AWS Lambda .NET tool, which handles the cross-platform compilation:
+
+```bash
+dotnet lambda deploy-function --function-name powertools-aot-logger-demo --function-role your-lambda-role-arn
+```
+
+This will:
+
+1. Detect your project is using Native AOT
+2. Use Docker behind the scenes to compile for Amazon Linux
+3. Deploy the resulting function
+
+#### Option B: Using Docker Directly
+
+Alternatively, you can use Docker directly for more control:
+
+##### On macOS/Linux:
+
+```bash
+# Create a build container using Amazon's provided image
+docker run --rm -v $(pwd):/workspace -w /workspace public.ecr.aws/sam/build-dotnet8:latest-x86_64 \
+ bash -c "cd src/PowertoolsAotLoggerDemo && dotnet publish -c Release -r linux-x64 -o publish"
+
+# Deploy using the AWS CLI
+cd src/PowertoolsAotLoggerDemo/publish
+zip -r function.zip *
+aws lambda create-function \
+ --function-name powertools-aot-logger-demo \
+ --runtime provided.al2023 \
+ --handler bootstrap \
+ --role arn:aws:iam::123456789012:role/your-lambda-role \
+ --zip-file fileb://function.zip
+```
+
+##### On Windows:
+
+```powershell
+# Create a build container using Amazon's provided image
+docker run --rm -v ${PWD}:/workspace -w /workspace public.ecr.aws/sam/build-dotnet8:latest-x86_64 `
+ bash -c "cd src/PowertoolsAotLoggerDemo && dotnet publish -c Release -r linux-x64 -o publish"
+
+# Deploy using the AWS CLI
+cd src\PowertoolsAotLoggerDemo\publish
+Compress-Archive -Path * -DestinationPath function.zip -Force
+aws lambda create-function `
+ --function-name powertools-aot-logger-demo `
+ --runtime provided.al2023 `
+ --handler bootstrap `
+ --role arn:aws:iam::123456789012:role/your-lambda-role `
+ --zip-file fileb://function.zip
+```
+
+## 9. Testing the Function
+
+Test your Lambda function using the AWS CLI:
+
+```bash
+aws lambda invoke --function-name powertools-aot-logger-demo --payload '{"name":"PowertoolsAOT"}' response.json
+cat response.json
+```
+
+You should see a response like:
+
+```json
+{
+ "Level": "Information",
+ "Message": "test",
+ "Timestamp": "2025-05-06T09:52:19.8222787Z",
+ "Service": "TestService",
+ "ColdStart": true,
+ "XrayTraceId": "1-6819dbd3-0de6dc4b6cc712b020ee8ae7",
+ "Name": "AWS.Lambda.Powertools.Logging.Logger"
+}
+{
+ "Level": "Information",
+ "Message": "Processing context: Amazon.Lambda.RuntimeSupport.LambdaContext",
+ "Timestamp": "2025-05-06T09:52:19.8232664Z",
+ "Service": "TestService",
+ "ColdStart": true,
+ "XrayTraceId": "1-6819dbd3-0de6dc4b6cc712b020ee8ae7",
+ "Name": "AWS.Lambda.Powertools.Logging.Logger",
+ "Context": {
+ "AwsRequestId": "20f8da57-002b-426d-84c2-c295e4797e23",
+ "ClientContext": {
+ "Environment": null,
+ "Client": null,
+ "Custom": null
+ },
+ "FunctionName": "powertools-aot-logger-demo",
+ "FunctionVersion": "$LATEST",
+ "Identity": {
+ "IdentityId": null,
+ "IdentityPoolId": null
+ },
+ "InvokedFunctionArn": "your arn",
+ "Logger": {},
+ "LogGroupName": "/aws/lambda/powertools-aot-logger-demo",
+ "LogStreamName": "2025/05/06/[$LATEST]71249d02013b42b9b044b42dd4c7c37a",
+ "MemoryLimitInMB": 512,
+ "RemainingTime": "00:00:29.9972216"
+ }
+}
+```
+
+Check the logs in CloudWatch Logs to see the structured logs created by Powertools Logger.
+
+## 10. Performance Considerations and Best Practices
+
+### Trimming Considerations
+
+Native AOT uses aggressive trimming, which can cause issues with reflection-based code. Here are tips to avoid common
+problems:
+
+1. **Using DynamicJsonSerializer**: If you're encountering trimming issues with JSON serialization, add a trimming hint:
+
+```csharp
+[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.PublicFields | DynamicallyAccessedMemberTypes.PublicProperties)]
+public class MyRequestType
+{
+ // Properties that will be preserved during trimming
+}
+```
+
+2. **Logging Objects**: When logging objects with structural logging, consider creating simple DTOs instead of complex
+ types:
+
+```csharp
+// Instead of logging complex domain objects:
+Logger.LogInformation("User: {@user}", complexUserWithCircularReferences);
+
+// Create a simple loggable DTO:
+var userInfo = new { Id = user.Id, Name = user.Name, Status = user.Status };
+Logger.LogInformation("User: {@userInfo}", userInfo);
+```
+
+3. **Handling Reflection**: If you need reflection, explicitly preserve types:
+
+```xml
+
+
+
+
+```
+
+And in TrimmerRoots.xml:
+
+```xml
+
+
+
+
+
+
+```
+
+### Lambda Configuration Best Practices
+
+1. **Memory Settings**: Native AOT functions typically need less memory:
+
+```bash
+aws lambda update-function-configuration \
+ --function-name powertools-aot-logger-demo \
+ --memory-size 512
+```
+
+2. **Environment Variables**: Set the AWS_LAMBDA_DOTNET_PREJIT environment variable to 0 (it's not needed for AOT):
+
+```bash
+aws lambda update-function-configuration \
+ --function-name powertools-aot-logger-demo \
+ --environment Variables={AWS_LAMBDA_DOTNET_PREJIT=0}
+```
+
+3. **ARM64 Support**: For even better performance, consider using ARM64 architecture:
+
+When creating your project:
+
+```bash
+dotnet new lambda.NativeAOT -n PowertoolsAotLoggerDemo --architecture arm64
+```
+
+Or modify your deployment:
+
+```bash
+aws lambda update-function-code \
+  --function-name powertools-aot-logger-demo \
+  --architectures arm64 --zip-file fileb://function.zip
+```
+
+### Monitoring Cold Start Performance
+
+The Powertools Logger automatically logs cold start information. Use CloudWatch Logs Insights to analyze performance:
+
+```
+fields @timestamp, coldStart, billedDurationMs, maxMemoryUsedMB
+| filter functionName = "powertools-aot-logger-demo"
+| sort @timestamp desc
+| limit 100
+```
+
+## 11. Troubleshooting Common AOT Issues
+
+### Missing Type Metadata
+
+If you see errors about missing metadata, you may need to add more types to your trimmer roots:
+
+```xml
+
+
+
+
+
+
+```
+
+### Build Failures on macOS/Windows
+
+If you're building directly on macOS/Windows without Docker and encountering errors, remember that Native AOT is
+platform-specific. Always use the cross-platform build options mentioned earlier.
+
+## Summary
+
+In this tutorial, you've learned:
+
+1. How to set up a .NET Native AOT Lambda project with Powertools Logger
+2. How to handle trimming concerns and ensure compatibility
+3. Cross-platform build and deployment strategies for Amazon Linux 2023
+4. Performance optimization techniques specific to AOT lambdas
+
+Native AOT combined with Powertools Logger gives you the best of both worlds: high-performance, low-latency Lambda
+functions with rich, structured logging capabilities.
+
+!!! tip "Next Steps"
+    Explore using the Embedded Metrics Format (EMF) with your Native AOT Lambda functions for enhanced observability,
+    or try implementing Powertools Tracing in your Native AOT functions.
diff --git a/docs/getting-started/logger/aspnet.md b/docs/getting-started/logger/aspnet.md
new file mode 100644
index 000000000..991bfc399
--- /dev/null
+++ b/docs/getting-started/logger/aspnet.md
@@ -0,0 +1,500 @@
+---
+title: ASP.NET Core Minimal API Logging
+description: Getting started with Logging in ASP.NET Core Minimal APIs
+---
+
+# Getting Started with AWS Lambda Powertools for .NET Logger in ASP.NET Core Minimal APIs
+
+This tutorial shows you how to set up an ASP.NET Core Minimal API project with AWS Lambda Powertools for .NET Logger - covering installation of required tools through deployment and advanced logging features.
+
+## Prerequisites
+
+- An AWS account with appropriate permissions
+- A code editor (we'll use Visual Studio Code in this tutorial)
+- .NET 8 SDK or later
+
+## 1. Installing Required Tools
+
+First, ensure you have the .NET SDK installed. If not, you can download it from the [.NET download page](https://dotnet.microsoft.com/download/dotnet).
+
+```bash
+dotnet --version
+```
+
+You should see output like `8.0.100` or similar.
+
+Next, install the AWS Lambda .NET CLI tools:
+
+```bash
+dotnet tool install -g Amazon.Lambda.Tools
+dotnet new install Amazon.Lambda.Templates
+```
+
+Verify installation:
+
+```bash
+dotnet lambda --help
+```
+
+## 2. Setting up AWS CLI credentials
+
+Ensure your AWS credentials are configured:
+
+```bash
+aws configure
+```
+
+Enter your AWS Access Key ID, Secret Access Key, default region, and output format.
+
+## 3. Creating a New ASP.NET Core Minimal API Lambda Project
+
+Create a directory for your project:
+
+```bash
+mkdir powertools-aspnet-logger-demo
+cd powertools-aspnet-logger-demo
+```
+
+Create a new ASP.NET Minimal API project using the AWS Lambda template:
+
+```bash
+dotnet new serverless.AspNetCoreMinimalAPI --name PowertoolsAspNetLoggerDemo
+cd PowertoolsAspNetLoggerDemo/src/PowertoolsAspNetLoggerDemo
+```
+
+## 4. Adding the Powertools Logger Package
+
+Add the AWS.Lambda.Powertools.Logging package:
+
+```bash
+dotnet add package AWS.Lambda.Powertools.Logging
+```
+
+## 5. Implementing the Minimal API with Powertools Logger
+
+Let's modify the Program.cs file to implement our Minimal API with Powertools Logger:
+
+```csharp
+using Microsoft.Extensions.Logging;
+using AWS.Lambda.Powertools.Logging;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Configure AWS Lambda
+// This is what connects the Events from API Gateway to the ASP.NET Core pipeline
+// In this case we are using HttpApi
+builder.Services.AddAWSLambdaHosting(LambdaEventSource.HttpApi);
+
+// Add Powertools Logger
+var logger = LoggerFactory.Create(builder =>
+{
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "powertools-aspnet-demo";
+ config.MinimumLogLevel = LogLevel.Debug;
+ config.LoggerOutputCase = LoggerOutputCase.CamelCase;
+ config.TimestampFormat = "yyyy-MM-dd HH:mm:ss.fff";
+ });
+}).CreatePowertoolsLogger();
+
+var app = builder.Build();
+
+app.MapGet("/", () => {
+ logger.LogInformation("Processing root request");
+ return "Hello from Powertools ASP.NET Core Minimal API!";
+});
+
+app.MapGet("/users/{id}", (string id) => {
+ logger.LogInformation("Getting user with ID: {userId}", id);
+
+ // Log a structured object
+ var user = new User {
+ Id = id,
+ Name = "John Doe",
+ Email = "john.doe@example.com"
+ };
+
+ logger.LogDebug("User details: {@user}", user);
+
+ return Results.Ok(user);
+});
+
+app.Run();
+
+// Simple user class for demonstration
+public class User
+{
+ public string? Id { get; set; }
+ public string? Name { get; set; }
+ public string? Email { get; set; }
+
+ public override string ToString()
+ {
+ return $"{Name} ({Id})";
+ }
+}
+```
+
+## 6. Understanding the LoggerFactory Setup
+
+Let's examine the key parts of how we've set up the logger:
+
+```csharp
+var logger = LoggerFactory.Create(builder =>
+{
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "powertools-aspnet-demo";
+ config.MinimumLogLevel = LogLevel.Debug;
+ config.LoggerOutputCase = LoggerOutputCase.CamelCase;
+ config.TimestampFormat = "yyyy-MM-dd HH:mm:ss.fff";
+ });
+}).CreatePowertoolsLogger();
+```
+
+This setup:
+
+1. Creates a new `LoggerFactory` instance
+2. Adds the Powertools Logger provider to the factory
+3. Configures the logger with:
+ - Service name that appears in all logs
+ - Minimum logging level set to Debug
+ - CamelCase output format for JSON properties
+4. Creates a Powertools logger instance from the factory
+
+## 7. Building and Deploying the Lambda Function
+
+Build your function:
+
+```bash
+dotnet build
+```
+
+Deploy the function using the AWS Lambda CLI tools:
+
+We started from a serverless template, but we are only going to deploy a Lambda function, not an API Gateway.
+
+First update the `aws-lambda-tools-defaults.json` file with your details:
+
+```json
+{
+ "Information": [
+ ],
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "PowertoolsAspNetLoggerDemo",
+ "function-role": "arn:aws:iam::123456789012:role/my-role",
+ "function-name": "PowertoolsAspNetLoggerDemo"
+}
+```
+!!! Info "IAM Role"
+ Make sure to replace the `function-role` with the ARN of an IAM role that has permissions to write logs to CloudWatch.
+
+!!! Info
+ As you can see the function-handler is set to `PowertoolsAspNetLoggerDemo` which is the name of the project.
+ This example template uses [Executable assembly handlers](https://docs.aws.amazon.com/lambda/latest/dg/csharp-handler.html#csharp-executable-assembly-handlers) which use the assembly name as the handler.
+
+Then deploy the function:
+
+```bash
+dotnet lambda deploy-function
+```
+
+Follow the prompts to complete the deployment.
+
+## 8. Testing the Function
+
+Test your Lambda function using the AWS CLI.
+The following command simulates an API Gateway payload, more information can be found in the [AWS Lambda documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html).
+
+```bash
+dotnet lambda invoke-function PowertoolsAspNetLoggerDemo --payload '{
+ "requestContext": {
+ "http": {
+ "method": "GET",
+ "path": "/"
+ }
+ }
+}'
+```
+
+You should see a response and the logs in JSON format.
+
+```bash
+Payload:
+{
+ "statusCode": 200,
+ "headers": {
+ "Content-Type": "text/plain; charset=utf-8"
+ },
+ "body": "Hello from Powertools ASP.NET Core Minimal API!",
+ "isBase64Encoded": false
+}
+
+Log Tail:
+START RequestId: cf670319-d9c4-4005-aebc-3afd08ae01e0 Version: $LATEST
+warn: Amazon.Lambda.AspNetCoreServer.AbstractAspNetCoreFunction[0]
+Request does not contain domain name information but is derived from APIGatewayProxyFunction.
+{
+ "level": "Information",
+ "message": "Processing root request",
+ "timestamp": "2025-04-23T18:02:54.9014083Z",
+ "service": "powertools-aspnet-demo",
+ "coldStart": true,
+ "xrayTraceId": "1-68092b4e-352be5201ea5b15b23854c44",
+ "name": "AWS.Lambda.Powertools.Logging.Logger"
+}
+END RequestId: cf670319-d9c4-4005-aebc-3afd08ae01e0
+```
+
+## 9. Advanced Logging Features
+
+Now that we have basic logging set up, let's explore some advanced features of Powertools Logger.
+
+### Adding Context with AppendKey
+
+You can add custom keys to all subsequent log messages:
+
+```csharp
+app.MapGet("/users/{id}", (string id) =>
+{
+ // Add context to all subsequent logs
+ Logger.AppendKey("userId", id);
+ Logger.AppendKey("source", "users-api");
+
+ logger.LogInformation("Getting user with ID: {id}", id);
+
+ // Log a structured object
+ var user = new User
+ {
+ Id = id,
+ Name = "John Doe",
+ Email = "john.doe@example.com"
+ };
+
+ logger.LogInformation("User details: {@user}", user);
+
+ return Results.Ok(user);
+});
+```
+
+This will add `userId` and `source` to all logs generated in this request context.
+This will output:
+
+```bash hl_lines="19-20 32-36"
+Payload:
+{
+ "statusCode": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": "{\"id\":\"1\",\"name\":\"John Doe\",\"email\":\"john.doe@example.com\"}",
+ "isBase64Encoded": false
+}
+Log Tail:
+{
+ "level": "Information",
+ "message": "Getting user with ID: 1",
+ "timestamp": "2025-04-23T18:21:28.5314300Z",
+ "service": "powertools-aspnet-demo",
+ "coldStart": true,
+ "xrayTraceId": "1-68092fa7-64f070f7329650563b7501fe",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "userId": "1",
+ "source": "users-api"
+}
+{
+ "level": "Information",
+ "message": "User details: John Doe (1)",
+ "timestamp": "2025-04-23T18:21:28.6491316Z",
+ "service": "powertools-aspnet-demo",
+ "coldStart": true,
+ "xrayTraceId": "1-68092fa7-64f070f7329650563b7501fe",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "userId": "1",
+ "source": "users-api",
+ "user": { // User object logged
+ "id": "1",
+ "name": "John Doe",
+ "email": "john.doe@example.com"
+ }
+}
+```
+
+### Customizing Log Output
+
+You can customize the log output format:
+
+```csharp
+builder.AddPowertoolsLogger(config =>
+{
+ config.Service = "powertools-aspnet-demo";
+ config.LoggerOutputCase = LoggerOutputCase.SnakeCase; // Change to snake_case
+ config.TimestampFormat = "yyyy-MM-dd HH:mm:ss"; // Custom timestamp format
+});
+```
+
+### Log Sampling for Debugging
+
+When you need more detailed logs for a percentage of requests:
+
+```csharp
+// In your logger factory setup
+builder.AddPowertoolsLogger(config =>
+{
+ config.Service = "powertools-aspnet-demo";
+ config.MinimumLogLevel = LogLevel.Information; // Normal level
+ config.SamplingRate = 0.1; // 10% of requests will log at Debug level
+});
+```
+
+### Structured Logging
+
+Powertools Logger provides excellent support for structured logging:
+
+```csharp
+app.MapPost("/products", (Product product) => {
+ logger.LogInformation("Creating new product: {productName}", product.Name);
+
+ // Log the entire object with all properties
+ logger.LogDebug("Product details: {@product}", product);
+
+ // Log the ToString() of the object
+ logger.LogDebug("Product details: {product}", product);
+
+ return Results.Created($"/products/{product.Id}", product);
+});
+
+public class Product
+{
+ public string Id { get; set; } = Guid.NewGuid().ToString();
+ public string Name { get; set; } = string.Empty;
+ public decimal Price { get; set; }
+ public string Category { get; set; } = string.Empty;
+ public override string ToString()
+ {
+ return $"{Name} ({Id}) - {Category}: {Price:C}";
+ }
+}
+```
+
+### Using Log Buffering
+
+For high-throughput applications, you can buffer lower-level logs and only flush them when needed:
+
+```csharp
+var logger = LoggerFactory.Create(builder =>
+{
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "powertools-aspnet-demo";
+ config.LogBuffering = new LogBufferingOptions
+ {
+ BufferAtLogLevel = LogLevel.Debug,
+ FlushOnErrorLog = true
+ };
+ });
+}).CreatePowertoolsLogger();
+
+// Usage example
+app.MapGet("/process", () => {
+ logger.LogDebug("Debug log 1"); // Buffered
+ logger.LogDebug("Debug log 2"); // Buffered
+
+ try {
+ // Business logic that might fail
+ throw new Exception("Something went wrong");
+ }
+ catch (Exception ex) {
+ // This will also flush all buffered logs
+ logger.LogError(ex, "An error occurred");
+ return Results.Problem("Processing failed");
+ }
+
+ // Manual flushing option
+ // Logger.FlushBuffer();
+
+ return Results.Ok("Processed successfully");
+});
+```
+
+### Correlation IDs
+
+For tracking requests across multiple services:
+
+```csharp
+app.Use(async (context, next) => {
+ // Extract correlation ID from headers
+ if (context.Request.Headers.TryGetValue("X-Correlation-ID", out var correlationId))
+ {
+ Logger.AppendKey("correlationId", correlationId.ToString());
+ }
+
+ await next();
+});
+```
+
+## 10. Best Practices for ASP.NET Minimal API Logging
+
+### Register Logger as a Singleton
+
+For better performance, you can register the Powertools Logger as a singleton:
+
+```csharp
+// In Program.cs
+builder.Services.AddSingleton(sp => {
+ return LoggerFactory.Create(builder =>
+ {
+ builder.AddPowertoolsLogger(config =>
+ {
+ config.Service = "powertools-aspnet-demo";
+ });
+ }).CreatePowertoolsLogger();
+});
+
+// Then inject it in your handlers
+app.MapGet("/example", (ILogger logger) => {
+ logger.LogInformation("Using injected logger");
+ return "Example with injected logger";
+});
+```
+
+## 11. Viewing and Analyzing Logs
+
+After deploying your Lambda function, you can view the logs in AWS CloudWatch Logs. The structured JSON format makes it easy to search and analyze logs.
+
+Here's an example of what your logs will look like:
+
+```json
+{
+ "level": "Information",
+ "message": "Getting user with ID: 123",
+ "timestamp": "2023-04-15 14:23:45.123",
+ "service": "powertools-aspnet-demo",
+ "coldStart": true,
+ "functionName": "PowertoolsAspNetLoggerDemo",
+ "functionMemorySize": 256,
+ "functionArn": "arn:aws:lambda:us-east-1:123456789012:function:PowertoolsAspNetLoggerDemo",
+ "functionRequestId": "a1b2c3d4-e5f6-g7h8-i9j0-k1l2m3n4o5p6",
+ "userId": "123"
+}
+```
+
+## Summary
+
+In this tutorial, you've learned:
+
+1. How to set up ASP.NET Core Minimal API with AWS Lambda
+2. How to integrate Powertools Logger using the LoggerFactory approach
+3. How to configure and customize the logger
+4. Advanced logging features like structured logging, correlation IDs, and log buffering
+5. Best practices for using the logger in an ASP.NET Core application
+
+Powertools for AWS Lambda Logger provides structured logging that makes it easier to search, analyze, and monitor your Lambda functions, and integrates seamlessly with ASP.NET Core Minimal APIs.
+
+!!! tip "Next Steps"
+ Explore integrating Powertools Tracing and Metrics with your ASP.NET Core Minimal API to gain even more observability insights.
diff --git a/docs/getting-started/logger/simple.md b/docs/getting-started/logger/simple.md
new file mode 100644
index 000000000..b47ed3a07
--- /dev/null
+++ b/docs/getting-started/logger/simple.md
@@ -0,0 +1,329 @@
+---
+title: Simple Logging
+description: Getting started with Logging
+---
+
+# Getting Started with AWS Lambda Powertools for .NET Logger
+
+This tutorial shows you how to set up a new AWS Lambda project with Powertools for .NET Logger from scratch - covering the installation of required tools through to deployment.
+
+## Prerequisites
+
+- An AWS account with appropriate permissions
+- A code editor (we'll use Visual Studio Code in this tutorial)
+
+## 1. Installing .NET SDK
+
+First, let's download and install the .NET SDK.
+You can find the latest version on the [.NET download page](https://dotnet.microsoft.com/download/dotnet).
+Make sure to install the latest version of the .NET SDK (8.0 or later).
+
+Verify installation:
+
+```bash
+dotnet --version
+```
+
+You should see output like `8.0.100` or similar (the version number may vary).
+
+## 2. Installing AWS Lambda Tools for .NET CLI
+
+Install the AWS Lambda .NET CLI tools:
+
+```bash
+dotnet tool install -g Amazon.Lambda.Tools
+dotnet new install Amazon.Lambda.Templates
+```
+
+Verify installation:
+
+```bash
+dotnet lambda --help
+```
+
+You should see AWS Lambda CLI command help displayed.
+
+## 3. Setting up AWS CLI credentials
+
+Ensure your AWS credentials are configured:
+
+```bash
+aws configure
+```
+
+Enter your AWS Access Key ID, Secret Access Key, default region, and output format.
+
+## 4. Creating a New Lambda Project
+
+Create a directory for your project:
+
+```bash
+mkdir powertools-logger-demo
+cd powertools-logger-demo
+```
+
+Create a new Lambda project using the AWS Lambda template:
+
+```bash
+dotnet new lambda.EmptyFunction --name PowertoolsLoggerDemo
+cd PowertoolsLoggerDemo/src/PowertoolsLoggerDemo
+```
+
+## 5. Adding the Powertools Logger Package
+
+Add the AWS.Lambda.Powertools.Logging and Amazon.Lambda.APIGatewayEvents packages:
+
+```bash
+dotnet add package AWS.Lambda.Powertools.Logging
+dotnet add package Amazon.Lambda.APIGatewayEvents
+```
+
+## 6. Implementing the Lambda Function with Logger
+
+Let's modify the Function.cs file to implement our function with Powertools Logger:
+
+```csharp
+using System.Net;
+using Amazon.Lambda.APIGatewayEvents;
+using Amazon.Lambda.Core;
+using AWS.Lambda.Powertools.Logging;
+
+// Assembly attribute to enable the Lambda function's JSON input to be converted into a .NET class.
+[assembly: LambdaSerializer(typeof(Amazon.Lambda.Serialization.SystemTextJson.DefaultLambdaJsonSerializer))]
+
+namespace PowertoolsLoggerDemo
+{
+ public class Function
+ {
+ /// <summary>
+ /// A simple function that returns a greeting
+ /// </summary>
+ /// <param name="request">API Gateway request object</param>
+ /// <param name="context">Lambda context</param>
+ /// <returns>API Gateway response object</returns>
+ [Logging(Service = "greeting-service", LogLevel = Microsoft.Extensions.Logging.LogLevel.Information)]
+ public async Task<APIGatewayProxyResponse> FunctionHandler(APIGatewayProxyRequest request, ILambdaContext context)
+ {
+ // you can {@} serialize objects to log them
+ Logger.LogInformation("Processing request {@request}", request);
+
+ // You can append additional keys to your logs
+ Logger.AppendKey("QueryString", request.QueryStringParameters);
+
+ // Simulate processing
+ string name = "World";
+ if (request.QueryStringParameters != null && request.QueryStringParameters.ContainsKey("name"))
+ {
+ name = request.QueryStringParameters["name"];
+ Logger.LogInformation("Custom name provided: {name}", name);
+ }
+ else
+ {
+ Logger.LogInformation("Using default name");
+ }
+
+ // Create response
+ var response = new APIGatewayProxyResponse
+ {
+ StatusCode = (int)HttpStatusCode.OK,
+ Body = $"Hello, {name}!",
+ Headers = new Dictionary<string, string> { { "Content-Type", "text/plain" } }
+ };
+
+ Logger.LogInformation("Response successfully created");
+
+ return response;
+ }
+ }
+}
+```
+
+## 7. Configuring the Lambda Project
+
+Let's update the aws-lambda-tools-defaults.json file with specific settings:
+
+```json
+{
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "PowertoolsLoggerDemo::PowertoolsLoggerDemo.Function::FunctionHandler",
+ "function-name": "powertools-logger-demo",
+ "function-role": "arn:aws:iam::123456789012:role/your_role_here"
+}
+```
+
+## 8. Understanding Powertools Logger Features
+
+Let's examine some of the key features we've implemented:
+
+### Service Attribute
+
+The `[Logging]` attribute configures the logger for our Lambda function:
+
+```csharp
+[Logging(Service = "greeting-service", LogLevel = Microsoft.Extensions.Logging.LogLevel.Information)]
+```
+
+This sets:
+- The service name that will appear in all logs
+- The minimum logging level
+
+### Structured Logging
+
+Powertools Logger supports structured logging with named placeholders:
+
+```csharp
+Logger.LogInformation("Processing request {@request}", request);
+```
+
+This creates structured logs where `request` becomes a separate field in the JSON log output.
+
+### Additional Context
+
+You can add custom fields to all subsequent logs:
+
+```csharp
+Logger.AppendKey("QueryString", request.QueryStringParameters);
+```
+
+This adds the QueryString field with the key and value from the QueryStringParameters property.
+This can be an object like in the example or a simple value type.
+
+## 9. Building and Deploying the Lambda Function
+
+Build your function:
+
+```bash
+dotnet build
+```
+
+Deploy the function using the AWS Lambda CLI tools:
+
+```bash
+dotnet lambda deploy-function
+```
+
+The tool will use the settings from aws-lambda-tools-defaults.json. If prompted, confirm the deployment settings.
+
+## 10. Testing the Function
+
+Test your Lambda function using the AWS CLI, for example: `dotnet lambda invoke-function powertools-logger-demo --payload '{"queryStringParameters":{"name":"Powertools"}}'`
+You should see `Hello, Powertools!` in the response body, plus the logs in JSON format.
+
+```bash
+Payload:
+{"statusCode":200,"headers":{"Content-Type":"text/plain"},"body":"Hello, Powertools!","isBase64Encoded":false}
+
+Log Tail:
+{"level":"Information","message":"Processing request Amazon.Lambda.APIGatewayEvents.APIGatewayProxyRequest","timestamp":"2025-04-23T15:16:42.7473327Z","service":"greeting-service","cold_start":true,"function_name":"powertools-logger-demo","function_memory_size":512,"function_arn":"","function_request_id":"93f07a79-6146-4ed2-80d3-c0a06a5739e0","function_version":"$LATEST","xray_trace_id":"1-68090459-2c2aa3377cdaa9476348236a","name":"AWS.Lambda.Powertools.Logging.Logger","request":{"resource":null,"path":null,"http_method":null,"headers":null,"multi_value_headers":null,"query_string_parameters":{"name":"Powertools"},"multi_value_query_string_parameters":null,"path_parameters":null,"stage_variables":null,"request_context":null,"body":null,"is_base64_encoded":false}}
+{"level":"Information","message":"Custom name provided: Powertools","timestamp":"2025-04-23T15:16:42.9064561Z","service":"greeting-service","cold_start":true,"function_name":"powertools-logger-demo","function_memory_size":512,"function_arn":"","function_request_id":"93f07a79-6146-4ed2-80d3-c0a06a5739e0","function_version":"$LATEST","xray_trace_id":"1-68090459-2c2aa3377cdaa9476348236a","name":"AWS.Lambda.Powertools.Logging.Logger","query_string":{"name":"Powertools"}}
+{"level":"Information","message":"Response successfully created","timestamp":"2025-04-23T15:16:42.9082709Z","service":"greeting-service","cold_start":true,"function_name":"powertools-logger-demo","function_memory_size":512,"function_arn":"","function_request_id":"93f07a79-6146-4ed2-80d3-c0a06a5739e0","function_version":"$LATEST","xray_trace_id":"1-68090459-2c2aa3377cdaa9476348236a","name":"AWS.Lambda.Powertools.Logging.Logger","query_string":{"name":"Powertools"}}
+END RequestId: 98e69b78-f544-4928-914f-6c0902ac8678
+REPORT RequestId: 98e69b78-f544-4928-914f-6c0902ac8678 Duration: 547.66 ms Billed Duration: 548 ms Memory Size: 512 MB Max Memory Used: 81 MB Init Duration: 278.70 ms
+```
+
+## 11. Checking the Logs
+
+Visit the AWS CloudWatch console to see your structured logs. You'll notice:
+
+- JSON-formatted logs with consistent structure
+- Service name "greeting-service" in all logs
+- Additional fields like "query_string"
+- Cold start information automatically included
+- Lambda context information (function name, memory, etc.)
+
+Here's an example of what your logs will look like:
+
+```bash
+{
+ "level": "Information",
+ "message": "Processing request Amazon.Lambda.APIGatewayEvents.APIGatewayProxyRequest",
+ "timestamp": "2025-04-23T15:16:42.7473327Z",
+ "service": "greeting-service",
+ "cold_start": true,
+ "function_name": "powertools-logger-demo",
+ "function_memory_size": 512,
+ "function_arn": "",
+ "function_request_id": "93f07a79-6146-4ed2-80d3-c0a06a5739e0",
+ "function_version": "$LATEST",
+ "xray_trace_id": "1-68090459-2c2aa3377cdaa9476348236a",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "request": {
+ "resource": null,
+ "path": null,
+ "http_method": null,
+ "headers": null,
+ "multi_value_headers": null,
+ "query_string_parameters": {
+ "name": "Powertools"
+ },
+ "multi_value_query_string_parameters": null,
+ "path_parameters": null,
+ "stage_variables": null,
+ "request_context": null,
+ "body": null,
+ "is_base64_encoded": false
+ }
+}
+{
+ "level": "Information",
+ "message": "Response successfully created",
+ "timestamp": "2025-04-23T15:16:42.9082709Z",
+ "service": "greeting-service",
+ "cold_start": true,
+ "function_name": "powertools-logger-demo",
+ "function_memory_size": 512,
+ "function_arn": "",
+ "function_request_id": "93f07a79-6146-4ed2-80d3-c0a06a5739e0",
+ "function_version": "$LATEST",
+ "xray_trace_id": "1-68090459-2c2aa3377cdaa9476348236a",
+ "name": "AWS.Lambda.Powertools.Logging.Logger",
+ "query_string": {
+ "name": "Powertools"
+ }
+}
+```
+
+## Advanced Logger Features
+
+### Correlation IDs
+
+Track requests across services by extracting correlation IDs:
+
+```csharp
+[Logging(CorrelationIdPath = "/headers/x-correlation-id")]
+```
+
+### Customizing Log Output Format
+
+You can change the casing style of the logs:
+
+```csharp
+[Logging(LoggerOutputCase = LoggerOutputCase.CamelCase)]
+```
+
+Options include `CamelCase`, `PascalCase`, and `SnakeCase` (default).
+
+## Summary
+
+In this tutorial, you've:
+
+1. Installed the .NET SDK and AWS Lambda tools
+2. Created a new Lambda project
+3. Added and configured Powertools Logger
+4. Deployed and tested your function
+
+Powertools for AWS Logger provides structured logging that makes it easier to search, analyze, and monitor your Lambda functions. The key benefits are:
+
+- JSON-formatted logs for better machine readability
+- Consistent structure across all logs
+- Automatic inclusion of Lambda context information
+- Ability to add custom fields for better context
+- Integration with AWS CloudWatch for centralized log management
+
+!!! tip "Next Steps"
+ Explore more advanced features like custom log formatters, log buffering, and integration with other Powertools utilities like Tracing and Metrics.
\ No newline at end of file
diff --git a/docs/getting-started/metrics/aot.md b/docs/getting-started/metrics/aot.md
new file mode 100644
index 000000000..e69de29bb
diff --git a/docs/getting-started/metrics/aspnet.md b/docs/getting-started/metrics/aspnet.md
new file mode 100644
index 000000000..e69de29bb
diff --git a/docs/getting-started/metrics/simple.md b/docs/getting-started/metrics/simple.md
new file mode 100644
index 000000000..51536a470
--- /dev/null
+++ b/docs/getting-started/metrics/simple.md
@@ -0,0 +1,4 @@
+---
+title: Simple Metrics
+description: Getting started with Metrics
+---
\ No newline at end of file
diff --git a/docs/getting-started/tracing/aot.md b/docs/getting-started/tracing/aot.md
new file mode 100644
index 000000000..e69de29bb
diff --git a/docs/getting-started/tracing/simple.md b/docs/getting-started/tracing/simple.md
new file mode 100644
index 000000000..51536a470
--- /dev/null
+++ b/docs/getting-started/tracing/simple.md
@@ -0,0 +1,4 @@
+---
+title: Simple Tracing
+description: Getting started with Tracing
+---
\ No newline at end of file
diff --git a/docs/index.md b/docs/index.md
index 29875d66c..68251d623 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -163,6 +163,9 @@ Knowing which companies are using this library is important to help prioritize t
[**Caylent**](https://caylent.com/){target="_blank" rel="nofollow"}
{ .card }
+[**Instil Software**](https://instil.co/){target="_blank" rel="nofollow"}
+{ .card }
+
[**Pushpay**](https://pushpay.com/){target="_blank" rel="nofollow"}
{ .card }
diff --git a/docs/requirements.in b/docs/requirements.in
index 2424249a4..2b9323e7b 100644
--- a/docs/requirements.in
+++ b/docs/requirements.in
@@ -1 +1,2 @@
mkdocs-git-revision-date-plugin==0.3.2
+mkdocs-llmstxt==0.2.0
diff --git a/docs/requirements.txt b/docs/requirements.txt
index b2f8b22ed..6f492883d 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,9 +1,15 @@
#
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
#
+beautifulsoup4==4.13.4 \
+ --hash=sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b \
+ --hash=sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195
+ # via
+ # markdownify
+ # mkdocs-llmstxt
click==8.1.8 \
--hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \
--hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a
@@ -30,6 +36,14 @@ markdown==3.7 \
--hash=sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2 \
--hash=sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803
# via mkdocs
+markdown-it-py==3.0.0 \
+ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
+ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
+ # via mdformat
+markdownify==1.1.0 \
+ --hash=sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef \
+ --hash=sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd
+ # via mkdocs-llmstxt
markupsafe==3.0.2 \
--hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \
--hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \
@@ -95,6 +109,14 @@ markupsafe==3.0.2 \
# via
# jinja2
# mkdocs
+mdformat==0.7.22 \
+ --hash=sha256:61122637c9e1d9be1329054f3fa216559f0d1f722b7919b060a8c2a4ae1850e5 \
+ --hash=sha256:eef84fa8f233d3162734683c2a8a6222227a229b9206872e6139658d99acb1ea
+ # via mkdocs-llmstxt
+mdurl==0.1.2 \
+ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
+ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
+ # via markdown-it-py
mergedeep==1.3.4 \
--hash=sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8 \
--hash=sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307
@@ -112,6 +134,10 @@ mkdocs-get-deps==0.2.0 \
mkdocs-git-revision-date-plugin==0.3.2 \
--hash=sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef
# via -r requirements.in
+mkdocs-llmstxt==0.2.0 \
+ --hash=sha256:104f10b8101167d6baf7761942b4743869be3d8f8a8d909f4e9e0b63307f709e \
+ --hash=sha256:907de892e0c8be74002e8b4d553820c2b5bbcf03cc303b95c8bca48fb49c1a29
+ # via -r requirements.in
packaging==24.2 \
--hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
@@ -193,11 +219,55 @@ pyyaml-env-tag==0.1 \
six==1.17.0 \
--hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
--hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
- # via python-dateutil
+ # via
+ # markdownify
+ # python-dateutil
smmap==5.0.2 \
--hash=sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5 \
--hash=sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e
# via gitdb
+soupsieve==2.7 \
+ --hash=sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4 \
+ --hash=sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a
+ # via beautifulsoup4
+tomli==2.2.1 \
+ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
+ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
+ --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
+ --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
+ --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
+ --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
+ --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
+ --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
+ --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
+ --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
+ --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
+ --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
+ --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
+ --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
+ --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
+ --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
+ --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
+ --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
+ --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
+ --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
+ --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
+ --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
+ --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
+ --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
+ --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
+ --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
+ --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
+ --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
+ --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
+ --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
+ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
+ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
+ # via mdformat
+typing-extensions==4.13.2 \
+ --hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \
+ --hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef
+ # via beautifulsoup4
watchdog==6.0.0 \
--hash=sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a \
--hash=sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2 \
diff --git a/docs/roadmap.md b/docs/roadmap.md
index e88a50c15..0d537abc2 100644
--- a/docs/roadmap.md
+++ b/docs/roadmap.md
@@ -27,19 +27,19 @@ You can help us prioritize by [upvoting existing feature requests](https://githu
Modernizing our logging capabilities to align with .NET practices and improve developer experience.
-- [ ] Logger buffer implementation
-- [ ] New .NET-friendly API design (Serilog-like patterns)
-- [ ] Filtering and JMESPath expression support
-- [ ] Documentation for SDK context.Logger vs Powertools Logger differences
+- [x] Logger buffer implementation
+- [x] New .NET-friendly API design ILogger and LoggerFactory support
+- [x] Filtering and JMESPath expression support
+- [x] Message templates
#### Metrics V2
Updating metrics implementation to support latest EMF specifications and improve performance.
-- [ ] Update to latest EMF specifications
-- [ ] Breaking changes implementation for multiple dimensions
-- [ ] Add support for default dimensions on ColdStart metric
-- [ ] API updates - missing functionality that is present in Python implementation (ie: flush_metrics)
+- [x] Update to latest EMF specifications
+- [x] Breaking changes implementation for multiple dimensions
+- [x] Add support for default dimensions on ColdStart metric
+- [x] API updates - missing functionality that is present in Python implementation (ie: flush_metrics)
### Security and Production Readiness (P1)
@@ -47,7 +47,7 @@ Ensuring enterprise-grade security and compatibility with latest .NET developmen
- [ ] .NET 10 support from day one
- [ ] Deprecation path for .NET 6
-- [ ] Scorecard implementation
+- [x] Scorecard implementation
- [ ] Security compliance checks on our pipeline
- [ ] All utilities with end-to-end tests in our pipeline
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
index f24b32faa..93b397f56 100644
--- a/docs/stylesheets/extra.css
+++ b/docs/stylesheets/extra.css
@@ -33,3 +33,9 @@
[data-md-color-scheme="slate"] {
--md-typeset-a-color: rgb(28, 152, 152)
}
+
+/*.md-nav__link[for] {*/
+/* font-weight: bold*/
+/*}*/
+.md-nav__link[for] { color: var(--md-default-fg-color) !important; }
+
diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md
new file mode 100644
index 000000000..60cdf191d
--- /dev/null
+++ b/docs/utilities/kafka.md
@@ -0,0 +1,971 @@
+---
+title: Kafka Consumer
+description: Utility
+status: new
+---
+
+
+
+The Kafka Consumer utility transparently handles message deserialization, provides an intuitive developer experience, and integrates seamlessly with the rest of the Powertools for AWS Lambda ecosystem.
+
+```mermaid
+flowchart LR
+ KafkaTopic["Kafka Topic"] --> MSK["Amazon MSK"]
+ KafkaTopic --> MSKServerless["Amazon MSK Serverless"]
+ KafkaTopic --> SelfHosted["Self-hosted Kafka"]
+ MSK --> EventSourceMapping["Event Source Mapping"]
+ MSKServerless --> EventSourceMapping
+ SelfHosted --> EventSourceMapping
+ EventSourceMapping --> Lambda["Lambda Function"]
+ Lambda --> KafkaConsumer["Kafka Consumer Utility"]
+ KafkaConsumer --> Deserialization["Deserialization"]
+ Deserialization --> YourLogic["Your Business Logic"]
+```
+
+## Key features
+
+* Automatic deserialization of Kafka messages (JSON, Avro, and Protocol Buffers)
+* Simplified event record handling with intuitive interface
+* Support for key and value deserialization
+* Support for ESM with and without Schema Registry integration
+* Proper error handling for deserialization issues
+* Support for native AOT
+
+## Terminology
+
+**Event Source Mapping (ESM)** A Lambda feature that reads from streaming sources (like Kafka) and invokes your Lambda function. It manages polling, batching, and error handling automatically, eliminating the need for consumer management code.
+
+**Record Key and Value** Kafka messages contain two important parts: an optional key that determines the partition and a value containing the actual message data. Both are base64-encoded in Lambda events and can be independently deserialized.
+
+**Deserialization** Is the process of converting binary data (base64-encoded in Lambda events) into usable C# objects according to a specific format like JSON, Avro, or Protocol Buffers. Powertools handles this conversion automatically.
+
+**SchemaConfig class** Contains parameters that tell Powertools how to interpret message data, including the format type (JSON, Avro, Protocol Buffers) and optional schema definitions needed for binary formats.
+
+**Schema Registry** Is a centralized service that stores and validates schemas, ensuring producers and consumers maintain compatibility when message formats evolve over time.
+
+## Moving from traditional Kafka consumers
+
+Lambda processes Kafka messages as discrete events rather than continuous streams, requiring a different approach to consumer development that Powertools for AWS helps standardize.
+
+| Aspect | Traditional Kafka Consumers | Lambda Kafka Consumer |
+|--------|----------------------------|----------------------|
+| **Model** | Pull-based (you poll for messages) | Push-based (Lambda invoked with messages) |
+| **Scaling** | Manual scaling configuration | Automatic scaling to partition count |
+| **State** | Long-running application with state | Stateless, ephemeral executions |
+| **Offsets** | Manual offset management | Automatic offset commitment |
+| **Schema Validation** | Client-side schema validation | Optional Schema Registry integration with Event Source Mapping |
+| **Error Handling** | Per-message retry control | Batch-level retry policies |
+
+## Getting started
+
+### Installation
+
+Install the Powertools for AWS Lambda package with the appropriate extras for your use case:
+
+```bash
+# For processing Avro messages
+dotnet add package AWS.Lambda.Powertools.Kafka.Avro
+
+# For working with Protocol Buffers
+dotnet add package AWS.Lambda.Powertools.Kafka.Protobuf
+
+# For working with Json messages
+dotnet add package AWS.Lambda.Powertools.Kafka.Json
+```
+
+### Required resources
+
+To use the Kafka consumer utility, you need an AWS Lambda function configured with a Kafka event source. This can be Amazon MSK, MSK Serverless, or a self-hosted Kafka cluster.
+
+=== "getting_started_with_msk.yaml"
+
+ ```yaml
+ AWSTemplateFormatVersion: '2010-09-09'
+ Transform: AWS::Serverless-2016-10-31
+ Resources:
+ KafkaConsumerFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: LambdaFunction::LambdaFunction.Function::FunctionHandler
+ Runtime: dotnet8
+ Timeout: 30
+ Events:
+ MSKEvent:
+ Type: MSK
+ Properties:
+ StartingPosition: LATEST
+ Stream: !GetAtt MyMSKCluster.Arn
+ Topics:
+ - my-topic-1
+ - my-topic-2
+ Policies:
+ - AWSLambdaMSKExecutionRole
+ ```
+
+### Using ESM with Schema Registry
+
+The Event Source Mapping configuration determines which mode is used. With `JSON`, Lambda converts all messages to JSON before invoking your function. With `SOURCE` mode, Lambda preserves the original format, requiring your function to handle the appropriate deserialization.
+
+Powertools for AWS supports both Schema Registry integration modes in your Event Source Mapping configuration.
+
+### Function deployment type
+
+The Kafka consumer utility can be used with both Class Library and Top Level Function deployment types. The choice depends on your project structure and whether you prefer to define your Lambda handler in a class or as a standalone function.
+
+When using the Kafka consumer utility, you must specify the serializer in your Lambda function. This serializer handles the deserialization of Kafka messages into C# objects.
+
+- Class Library Deployment: Use `PowertoolsKafkaAvroSerializer`, `PowertoolsKafkaProtobufSerializer`, or `PowertoolsKafkaJsonSerializer` and replace the default serializer in your Lambda function assembly attribute.
+- Top Level Function Deployment: Use `PowertoolsKafkaAvroSerializer`, `PowertoolsKafkaProtobufSerializer`, or `PowertoolsKafkaJsonSerializer` and pass it to the `LambdaBootstrapBuilder.Create` method.
+
+=== "Class Library Deployment"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Avro;
+ using AWS.Lambda.Powertools.Logging;
+
+ [assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] // Use PowertoolsKafkaAvroSerializer for Avro serialization
+
+ namespace MyKafkaConsumer;
+
+ public class Function
+ {
+ public string FunctionHandler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+ }
+ ```
+=== "Top Level Function Deployment"
+
+ ```csharp hl_lines="15"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Avro;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization
+ .Build()
+ .RunAsync();
+ ```
+
+
+### Processing Kafka events
+
+The Kafka consumer utility transforms raw Lambda Kafka events into an intuitive format for processing. To handle messages effectively, you'll need to configure a schema that matches your data format.
+
+The parameter for the handler function is `ConsumerRecords<TK, T>`, where `TK` is the type of the key and `T` is the type of the value.
+
+
+???+ tip "Using Avro or Protocol Buffers is recommended"
+    We recommend Avro or Protocol Buffers for production Kafka implementations due to their schema evolution capabilities, compact binary format, and integration with Schema Registry. These offer better type safety and forward/backward compatibility compared to JSON.
+
+
+=== "Avro Messages"
+
+ ```csharp hl_lines="16"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Avro;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization
+ .Build()
+ .RunAsync();
+ ```
+
+=== "Protocol Buffers"
+
+ ```csharp hl_lines="16"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+=== "JSON Messages"
+
+ ```csharp hl_lines="16"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Json;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaJsonSerializer for Json serialization
+ .Build()
+ .RunAsync();
+ ```
+
+???+ tip "Full examples on GitHub"
+    A full example including how to generate Avro and Protobuf C# classes can be found on [GitHub](https://github.com/aws-powertools/powertools-lambda-dotnet/tree/main/examples/kafka).
+
+### Deserializing keys and values
+
+The `PowertoolsKafkaJsonSerializer`, `PowertoolsKafkaProtobufSerializer` and `PowertoolsKafkaAvroSerializer` serializers can deserialize both keys and values independently based on your schema configuration.
+
+This flexibility allows you to work with different data formats in the same message.
+
+=== "Key and Value Deserialization"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+=== "Value-Only Deserialization"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+### Handling primitive types
+
+When working with primitive data types (string, int, etc.) rather than complex types, you can use any deserialization type like `PowertoolsKafkaJsonSerializer`.
+
+Simply place the primitive type like `int` or `string` in the `ConsumerRecords` type parameters, and the library will automatically handle primitive type deserialization.
+
+???+ tip "Common pattern: Keys with primitive values"
+ Using primitive types (strings, integers) as Kafka message keys is a common pattern for partitioning and identifying messages. Powertools automatically handles these primitive keys without requiring special configuration, making it easy to implement this popular design pattern.
+
+=== "Primitive key"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+=== "Primitive key and value"
+
+ ```csharp hl_lines="5"
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+### Message format support and comparison
+
+The Kafka consumer utility supports multiple serialization formats to match your existing Kafka implementation. Choose the format that best suits your needs based on performance, schema evolution requirements, and ecosystem compatibility.
+
+???+ tip "Selecting the right format"
+ For new applications, consider Avro or Protocol Buffers over JSON. Both provide schema validation, evolution support, and significantly better performance with smaller message sizes. Avro is particularly well-suited for Kafka due to its built-in schema evolution capabilities.
+
+=== "Supported Formats"
+
+ | Format | Schema Type | Description | Required Parameters |
+ |--------|-------------|-------------|---------------------|
+ | **JSON** | `"PowertoolsKafkaJsonSerializer"` | Human-readable text format | None |
+ | **Avro** | `"PowertoolsKafkaAvroSerializer"` | Compact binary format with schema | Apache Avro |
+ | **Protocol Buffers** | `"PowertoolsKafkaProtobufSerializer"` | Efficient binary format | Protocol Buffers |
+
+=== "Format Comparison"
+
+ | Feature | JSON | Avro | Protocol Buffers |
+ |---------|------|------|-----------------|
+ | **Schema Definition** | Optional | Required schema file | Required .proto file |
+ | **Schema Evolution** | None | Strong support | Strong support |
+ | **Size Efficiency** | Low | High | Highest |
+ | **Processing Speed** | Slower | Fast | Fastest |
+ | **Human Readability** | High | Low | Low |
+ | **Implementation Complexity** | Low | Medium | Medium |
+ | **Additional Dependencies** | None | Apache Avro | Protocol Buffers |
+
+Choose the serialization format that best fits your needs:
+
+* **JSON**: Best for simplicity and when schema flexibility is important
+* **Avro**: Best for systems with evolving schemas and when compatibility is critical
+* **Protocol Buffers**: Best for performance-critical systems with structured data
+
+## Advanced
+
+### Accessing record metadata
+
+Each Kafka record contains important metadata that you can access alongside the deserialized message content. This metadata helps with message processing, troubleshooting, and implementing advanced patterns like exactly-once processing.
+
+=== "Working with Record Metadata"
+
+ ```csharp
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ // Log record coordinates for tracing
+ Logger.LogInformation("Processing messagem from topic: {topic}", record.Topic);
+ Logger.LogInformation("Partition: {partition}, Offset: {offset}", record.Partition, record.Offset);
+ Logger.LogInformation("Produced at: {timestamp}", record.Timestamp);
+
+ // Process message headers
+ foreach (var header in record.Headers.DecodedValues())
+ {
+ Logger.LogInformation($"{header.Key}: {header.Value}");
+ }
+
+ // Access the Avro deserialized message content
+ CustomerProfile customerProfile = record.Value; // CustomerProfile class is auto-generated from Protobuf schema
+ Logger.LogInformation("Processing order for: {fullName}", customerProfile.FullName);
+ }
+ }
+
+ await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+#### Available metadata properties
+
+| Property | Description | Example Use Case |
+|----------|-------------|-----------------|
+| `Topic` | Topic name the record was published to | Routing logic in multi-topic consumers |
+| `Partition` | Kafka partition number | Tracking message distribution |
+| `Offset` | Position in the partition | De-duplication, exactly-once processing |
+| `Timestamp` | Unix Timestamp when record was created | Event timing analysis |
+| `TimestampType` | Timestamp type (CREATE_TIME or LOG_APPEND_TIME) | Data lineage verification |
+| `Headers` | Key-value pairs attached to the message | Cross-cutting concerns like correlation IDs |
+| `Key` | Deserialized message key | Customer ID or entity identifier |
+| `Value` | Deserialized message content | The actual business data |
+
+### Error handling
+
+Handle errors gracefully when processing Kafka messages to ensure your application maintains resilience and provides clear diagnostic information. The Kafka consumer utility integrates with standard C# exception handling patterns.
+
+=== "Error Handling"
+
+ ```csharp
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ var successfulRecords = 0;
+ var failedRecords = 0;
+
+ string Handler(ConsumerRecords<string, CustomerProfile> records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ try
+ {
+ // Process each record
+ Logger.LogInformation("Processing record from topic: {topic}", record.Topic);
+ Logger.LogInformation("Partition: {partition}, Offset: {offset}", record.Partition, record.Offset);
+
+ // Access the deserialized message content
+ CustomerProfile customerProfile = record.Value; // CustomerProfile class is auto-generated from Protobuf schema
+ ProcessOrder(customerProfile);
+ successfulRecords ++;
+ }
+ catch (Exception ex)
+ {
+ failedRecords ++;
+
+ // Log the error and continue processing other records
+ Logger.LogError(ex, "Error processing record from topic: {topic}, partition: {partition}, offset: {offset}",
+ record.Topic, record.Partition, record.Offset);
+
+ SendToDeadLetterQueue(record, ex); // Optional: Send to a dead-letter queue for further analysis
+ }
+
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return $"Processed {successfulRecords} records successfully, {failedRecords} records failed";
+ }
+
+ void ProcessOrder(CustomerProfile customerProfile)
+ {
+ Logger.LogInformation("Processing order for: {fullName}", customerProfile.FullName);
+ // Your business logic to process the order
+ // This could throw exceptions for various reasons (e.g., validation errors, database issues)
+ }
+
+ void SendToDeadLetterQueue(ConsumerRecord<string, CustomerProfile> record, Exception ex)
+ {
+ // Implement your dead-letter queue logic here
+ Logger.LogError("Sending record to dead-letter queue: {record}, error: {error}", record, ex.Message);
+ }
+
+ await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+
+!!! info "Handling deserialization errors"
+ Read [Deserialization failures](#deserialization-failures). Deserialization failures will fail the whole batch and do not execute your handler.
+
+### Integrating with Idempotency
+
+When processing Kafka messages in Lambda, failed batches can result in message reprocessing. The idempotency utility prevents duplicate processing by tracking which messages have already been handled, ensuring each message is processed exactly once.
+
+The Idempotency utility automatically stores the result of each successful operation, returning the cached result if the same message is processed again, which prevents potentially harmful duplicate operations like double-charging customers or double-counting metrics.
+
+=== "Idempotent Kafka Processing"
+
+ ```csharp
+ using Amazon.Lambda.Core;
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+ using AWS.Lambda.Powertools.Idempotency;
+
+ [assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+
+ namespace ProtoBufClassLibrary;
+
+ public class Function
+ {
+ public Function()
+ {
+ Idempotency.Configure(builder => builder.UseDynamoDb("idempotency_table"));
+ }
+
+ public string FunctionHandler(ConsumerRecords<string, Payment> records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ ProcessPayment(record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ [Idempotent]
+ private void ProcessPayment(Payment payment)
+ {
+ Logger.LogInformation("Processing payment {paymentId} for customer {customerName}",
+ payment.Id, payment.CustomerName);
+
+ // Your payment processing logic here
+ // This could involve calling an external payment service, updating a database, etc.
+ }
+ }
+ ```
+
+
+???+ tip "Ensuring exactly-once processing"
+ The `[Idempotent]` attribute will use the JSON representation of the Payment object to make sure that the same object is only processed exactly once. Even if a batch fails and Lambda retries the messages, each unique payment will be processed exactly once.
+
+### Best practices
+
+#### Handling large messages
+
+When processing large Kafka messages in Lambda, be mindful of memory limitations. Although the Kafka consumer utility optimizes memory usage, large deserialized messages can still exhaust Lambda's resources.
+
+For large messages, consider these proven approaches:
+
+* **Store the data**: use Amazon S3 and include only the S3 reference in your Kafka message
+* **Split large payloads**: use multiple smaller messages with sequence identifiers
+* **Increase memory**: increase your Lambda function's memory allocation, which also increases CPU capacity
+
+#### Batch size configuration
+
+The number of Kafka records processed per Lambda invocation is controlled by your Event Source Mapping configuration. Properly sized batches optimize cost and performance.
+
+=== "Batch size configuration"
+ ```yaml
+ Resources:
+ OrderProcessingFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: LambdaFunction::LambdaFunction.Function::FunctionHandler
+ Runtime: dotnet8
+ Events:
+ KafkaEvent:
+ Type: MSK
+ Properties:
+ Stream: !GetAtt OrdersMSKCluster.Arn
+ Topics:
+ - order-events
+ - payment-events
+ # Configuration for optimal throughput/latency balance
+ BatchSize: 100
+ MaximumBatchingWindowInSeconds: 5
+ StartingPosition: LATEST
+ # Enable partial batch success reporting
+ FunctionResponseTypes:
+ - ReportBatchItemFailures
+ ```
+
+Different workloads benefit from different batch configurations:
+
+* **High-volume, simple processing**: Use larger batches (100-500 records) with short timeout
+* **Complex processing with database operations**: Use smaller batches (10-50 records)
+* **Mixed message sizes**: Set appropriate batching window (1-5 seconds) to handle variability
+
+#### Cross-language compatibility
+
+When using binary serialization formats across multiple programming languages, ensure consistent schema handling to prevent deserialization failures.
+
+If you have a Python producer and a C# consumer, you may need to adjust your C# code to handle Python's naming conventions (snake_case) and data types.
+
+=== "Using Python naming convention"
+
+ ```c#
+ using AWS.Lambda.Powertools.Kafka;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using AWS.Lambda.Powertools.Logging;
+
+ string Handler(ConsumerRecords<string, CustomerProfile> records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+
+ await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+
+ // Example class that handles Python snake_case field names
+ public partial class CustomerProfile
+ {
+ [JsonPropertyName("user_id")] public string UserId { get; set; }
+
+ [JsonPropertyName("full_name")] public string FullName { get; set; }
+
+ [JsonPropertyName("age")] public long Age { get; set; }
+
+ [JsonPropertyName("account_status")] public string AccountStatus { get; set; }
+ }
+ ```
+
+Common cross-language challenges to address:
+
+* **Field naming conventions**: PascalCase in C# vs snake_case in Python
+* **Date/time**: representation differences
+* **Numeric precision handling**: especially decimals
+
+### Troubleshooting common errors
+
+#### Deserialization failures
+
+The Powertools .NET Kafka utility replaces the DefaultLambdaSerializer and performs **eager deserialization** of all records in the batch before your handler method is invoked.
+
+This means that if any record in the batch fails deserialization, a runtime exception will be thrown with a concrete error message explaining why deserialization failed, and your handler method will never be called.
+
+**Key implications:**
+
+- **Batch-level failure**: If one record fails deserialization, the entire batch fails
+- **Early failure detection**: Deserialization errors are caught before your business logic runs
+- **Clear error messages**: The exception message provides specific details about what went wrong
+- **No partial processing**: You cannot process some records while skipping failed ones within the same batch
+
+**Handling deserialization failures:**
+
+Since deserialization happens before your handler is called, you cannot catch these exceptions within your handler method. Instead, configure your Event Source Mapping with appropriate error handling:
+
+- **Dead Letter Queue (DLQ)**: Configure a DLQ to capture failed batches for later analysis
+- **Maximum Retry Attempts**: Set appropriate retry limits to avoid infinite retries
+- **Batch Size**: Use smaller batch sizes to minimize the impact of individual record failures
+
+```yaml
+# Example SAM template configuration for error handling
+Events:
+ KafkaEvent:
+ Type: MSK
+ Properties:
+ # ... other properties
+ BatchSize: 10 # Smaller batches reduce failure impact
+ MaximumRetryAttempts: 3
+ DestinationConfig:
+ OnFailure:
+ Type: SQS
+ Destination: !GetAtt DeadLetterQueue.Arn
+```
+
+#### Schema compatibility issues
+
+Schema compatibility issues often manifest as successful connections but failed deserialization. Common causes include:
+
+* **Schema evolution without backward compatibility**: New producer schema is incompatible with consumer schema
+* **Field type mismatches**: For example, a field changed from string to integer across systems
+* **Missing required fields**: Fields required by the consumer schema but absent in the message
+* **Default value discrepancies**: Different handling of default values between languages
+
+When using Schema Registry, verify schema compatibility rules are properly configured for your topics and that all applications use the same registry.
+
+#### Memory and timeout optimization
+
+Lambda functions processing Kafka messages may encounter resource constraints, particularly with large batches or complex processing logic.
+
+For memory errors:
+
+* Increase Lambda memory allocation, which also provides more CPU resources
+* Process fewer records per batch by adjusting the `BatchSize` parameter in your event source mapping
+* Consider optimizing your message format to reduce memory footprint
+
+For timeout issues:
+
+* Extend your Lambda function timeout setting to accommodate processing time
+* Implement chunked or asynchronous processing patterns for time-consuming operations
+* Monitor and optimize database operations, external API calls, or other I/O operations in your handler
+
+???+ tip "Monitoring memory usage"
+ Use CloudWatch metrics to track your function's memory utilization. If it consistently exceeds 80% of allocated memory, consider increasing the memory allocation or optimizing your code.
+
+## Kafka consumer workflow
+
+### Using ESM with Schema Registry validation (SOURCE)
+
+
+```mermaid
+sequenceDiagram
+ participant Kafka
+ participant ESM as Event Source Mapping
+ participant SchemaRegistry as Schema Registry
+ participant Lambda
+ participant KafkaConsumer
+ participant YourCode
+ Kafka->>+ESM: Send batch of records
+ ESM->>+SchemaRegistry: Validate schema
+ SchemaRegistry-->>-ESM: Confirm schema is valid
+ ESM->>+Lambda: Invoke with validated records (still encoded)
+ Lambda->>+KafkaConsumer: Pass Kafka event
+ KafkaConsumer->>KafkaConsumer: Parse event structure
+ loop For each record
+ KafkaConsumer->>KafkaConsumer: Decode base64 data
+ KafkaConsumer->>KafkaConsumer: Deserialize based on schema_type
+ alt Output serializer provided
+ KafkaConsumer->>KafkaConsumer: Apply output serializer
+ end
+ end
+ KafkaConsumer->>+YourCode: Provide ConsumerRecords
+ YourCode->>YourCode: Process records
+ YourCode-->>-KafkaConsumer: Return result
+ KafkaConsumer-->>-Lambda: Pass result back
+ Lambda-->>-ESM: Return response
+ ESM-->>-Kafka: Acknowledge processed batch
+```
+
+
+### Using ESM with Schema Registry deserialization (JSON)
+
+
+```mermaid
+sequenceDiagram
+ participant Kafka
+ participant ESM as Event Source Mapping
+ participant SchemaRegistry as Schema Registry
+ participant Lambda
+ participant KafkaConsumer
+ participant YourCode
+ Kafka->>+ESM: Send batch of records
+ ESM->>+SchemaRegistry: Validate and deserialize
+ SchemaRegistry->>SchemaRegistry: Deserialize records
+ SchemaRegistry-->>-ESM: Return deserialized data
+ ESM->>+Lambda: Invoke with pre-deserialized JSON records
+ Lambda->>+KafkaConsumer: Pass Kafka event
+ KafkaConsumer->>KafkaConsumer: Parse event structure
+ loop For each record
+ KafkaConsumer->>KafkaConsumer: Record is already deserialized
+ alt Output serializer provided
+ KafkaConsumer->>KafkaConsumer: Apply output serializer
+ end
+ end
+ KafkaConsumer->>+YourCode: Provide ConsumerRecords
+ YourCode->>YourCode: Process records
+ YourCode-->>-KafkaConsumer: Return result
+ KafkaConsumer-->>-Lambda: Pass result back
+ Lambda-->>-ESM: Return response
+ ESM-->>-Kafka: Acknowledge processed batch
+```
+
+
+### Using ESM without Schema Registry integration
+
+
+```mermaid
+sequenceDiagram
+ participant Kafka
+ participant Lambda
+ participant KafkaConsumer
+ participant YourCode
+ Kafka->>+Lambda: Invoke with batch of records (direct integration)
+ Lambda->>+KafkaConsumer: Pass raw Kafka event
+ KafkaConsumer->>KafkaConsumer: Parse event structure
+ loop For each record
+ KafkaConsumer->>KafkaConsumer: Decode base64 data
+ KafkaConsumer->>KafkaConsumer: Deserialize based on schema_type
+ alt Output serializer provided
+ KafkaConsumer->>KafkaConsumer: Apply output serializer
+ end
+ end
+ KafkaConsumer->>+YourCode: Provide ConsumerRecords
+ YourCode->>YourCode: Process records
+ YourCode-->>-KafkaConsumer: Return result
+ KafkaConsumer-->>-Lambda: Pass result back
+ Lambda-->>-Kafka: Acknowledge processed batch
+```
+
+
+## Testing your code
+
+Testing Kafka consumer functions is straightforward with Xunit. You can create simple test fixtures that simulate Kafka events without needing a real Kafka cluster.
+
+=== "Testing your code"
+
+ ```csharp
+ using System.Text;
+ using Amazon.Lambda.Core;
+ using Amazon.Lambda.TestUtilities;
+ using AWS.Lambda.Powertools.Kafka.Protobuf;
+ using Google.Protobuf;
+ using TestKafka;
+
+ public class KafkaTests
+ {
+ [Fact]
+ public void SimpleHandlerTest()
+ {
+ string Handler(ConsumerRecords<int, ProtobufProduct> records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ var product = record.Value;
+ context.Logger.LogInformation($"Processing {product.Name} at ${product.Price}");
+ }
+
+ return "Successfully processed Protobuf Kafka events";
+ }
+ // Simulate the handler execution
+ var mockLogger = new TestLambdaLogger();
+ var mockContext = new TestLambdaContext
+ {
+ Logger = mockLogger
+ };
+
+ var records = new ConsumerRecords<int, ProtobufProduct>
+ {
+ Records = new Dictionary<string, List<ConsumerRecord<int, ProtobufProduct>>>
+ {
+ { "mytopic-0", new List<ConsumerRecord<int, ProtobufProduct>>
+ {
+ new()
+ {
+ Topic = "mytopic",
+ Partition = 0,
+ Offset = 15,
+ Key = 42,
+ Value = new ProtobufProduct { Name = "Test Product", Id = 1, Price = 99.99 }
+ }
+ }
+ }
+ }
+ };
+
+ // Call the handler
+ var result = Handler(records, mockContext);
+
+ // Assert the result
+ Assert.Equal("Successfully processed Protobuf Kafka events", result);
+
+ // Verify the context logger output
+ Assert.Contains("Processing Test Product at $99.99", mockLogger.Buffer.ToString());
+
+ // Verify the records were processed
+ Assert.Single(records.Records);
+ Assert.Contains("mytopic-0", records.Records.Keys);
+ Assert.Single(records.Records["mytopic-0"]);
+ Assert.Equal("mytopic", records.Records["mytopic-0"][0].Topic);
+ Assert.Equal(0, records.Records["mytopic-0"][0].Partition);
+ Assert.Equal(15, records.Records["mytopic-0"][0].Offset);
+ Assert.Equal(42, records.Records["mytopic-0"][0].Key);
+ Assert.Equal("Test Product", records.Records["mytopic-0"][0].Value.Name);
+ Assert.Equal(1, records.Records["mytopic-0"][0].Value.Id);
+ Assert.Equal(99.99, records.Records["mytopic-0"][0].Value.Price);
+ }
+ }
+
+ ```
+
+## Code Generation for Serialization
+
+This guide explains how to automatically generate C# classes from Avro and Protobuf schema files in your Lambda projects.
+
+### Avro Class Generation
+
+#### Prerequisites
+
+Install the Apache Avro Tools globally:
+
+```bash
+dotnet tool install --global Apache.Avro.Tools
+```
+
+#### MSBuild Integration
+
+Add the following target to your `.csproj` file to automatically generate Avro classes during compilation:
+
+```xml
+<Target Name="GenerateAvroClasses" BeforeTargets="BeforeCompile">
+  <Exec Command="avrogen -s CustomerProfile.avsc Generated" />
+</Target>
+```
+
+This target will:
+- Run before compilation
+- Generate C# classes from `CustomerProfile.avsc` schema file
+- Output generated classes to the `Generated` folder
+
+### Protobuf Class Generation
+
+#### Package Reference
+
+Add the Grpc.Tools package to your `.csproj` file:
+
+```xml
+<PackageReference Include="Grpc.Tools" Version="2.66.0">
+  <PrivateAssets>all</PrivateAssets>
+  <IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
+</PackageReference>
+```
+
+#### Schema Files Configuration
+
+Add your `.proto` files to the project with the following configuration:
+
+```xml
+<ItemGroup>
+  <Protobuf Include="CustomerProfile.proto">
+    <GrpcServices>Client</GrpcServices>
+    <Access>Public</Access>
+    <ProtoCompile>True</ProtoCompile>
+    <CompileOutputs>True</CompileOutputs>
+    <OutputDir>obj\Debug/net8.0/</OutputDir>
+    <Generator>MSBuild:Compile</Generator>
+    <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+  </Protobuf>
+</ItemGroup>
+```
+
+This configuration will:
+- Generate client-side gRPC services
+- Make generated classes public
+- Automatically compile and include generated files
+- Copy proto files to output directory
+
+### Generated Code Usage
+
+Both Avro and Protobuf generators create strongly-typed C# classes that can be used with the PowerTools serialization utilities for efficient Lambda function processing.
\ No newline at end of file
diff --git a/docs/we_made_this.md b/docs/we_made_this.md
index abbb7b3f7..df6299d0e 100644
--- a/docs/we_made_this.md
+++ b/docs/we_made_this.md
@@ -77,6 +77,19 @@ Check out the great series of videos from Rahul Nath on Powertools for AWS Lambd
+## Powertools for AWS Lambda - MCP Server
+
+> **Author: [Michael Walmsley](https://www.linkedin.com/in/walmsles/){target="_blank"} :material-linkedin:**
+
+This project implements an MCP server that enables Large Language Models (LLMs) to search through Powertools for AWS Lambda documentation.
+
+The server accesses the live documentation `search_index.json` data and re-constructs a local search index using lunr.js. This provides an identical search experience for AI Agents and returns the exact same results as a person would get on the website.
+
+With the index being local, searches are super fast, and the index is cached for the lifetime of the server to avoid rebuilding indexes that have already been used. Since the MCP server uses real search data, it works with any Powertools for AWS documentation site and naturally supports all the runtimes.
+
+
+
+* [https://github.com/serverless-dna/powertools-mcp](https://github.com/serverless-dna/powertools-mcp){target="_blank"}
+
## Workshops
### Accelerate your serverless journey with Powertools for AWS Lambda
diff --git a/examples/AOT/AOT_Logging/src/AOT_Logging/AOT_Logging.csproj b/examples/AOT/AOT_Logging/src/AOT_Logging/AOT_Logging.csproj
index c523a8762..d06a0a531 100644
--- a/examples/AOT/AOT_Logging/src/AOT_Logging/AOT_Logging.csproj
+++ b/examples/AOT/AOT_Logging/src/AOT_Logging/AOT_Logging.csproj
@@ -18,8 +18,8 @@
-
-
+
+
diff --git a/examples/AOT/AOT_Logging/test/AOT_Logging.Tests/AOT_Logging.Tests.csproj b/examples/AOT/AOT_Logging/test/AOT_Logging.Tests/AOT_Logging.Tests.csproj
index 3d996e245..cba0ba03e 100644
--- a/examples/AOT/AOT_Logging/test/AOT_Logging.Tests/AOT_Logging.Tests.csproj
+++ b/examples/AOT/AOT_Logging/test/AOT_Logging.Tests/AOT_Logging.Tests.csproj
@@ -6,7 +6,7 @@
true
-
+
diff --git a/examples/AOT/AOT_Metrics/src/AOT_Metrics/AOT_Metrics.csproj b/examples/AOT/AOT_Metrics/src/AOT_Metrics/AOT_Metrics.csproj
index c6463f138..74caf11d7 100644
--- a/examples/AOT/AOT_Metrics/src/AOT_Metrics/AOT_Metrics.csproj
+++ b/examples/AOT/AOT_Metrics/src/AOT_Metrics/AOT_Metrics.csproj
@@ -18,8 +18,8 @@
-
-
+
+
\ No newline at end of file
diff --git a/examples/AOT/AOT_Metrics/test/AOT_Metrics.Tests/AOT_Metrics.Tests.csproj b/examples/AOT/AOT_Metrics/test/AOT_Metrics.Tests/AOT_Metrics.Tests.csproj
index 34fa6d4ce..fb935a9a9 100644
--- a/examples/AOT/AOT_Metrics/test/AOT_Metrics.Tests/AOT_Metrics.Tests.csproj
+++ b/examples/AOT/AOT_Metrics/test/AOT_Metrics.Tests/AOT_Metrics.Tests.csproj
@@ -6,7 +6,7 @@
true
-
+
diff --git a/examples/AOT/AOT_Tracing/src/AOT_Tracing/AOT_Tracing.csproj b/examples/AOT/AOT_Tracing/src/AOT_Tracing/AOT_Tracing.csproj
index 558effc2b..6e92d3312 100644
--- a/examples/AOT/AOT_Tracing/src/AOT_Tracing/AOT_Tracing.csproj
+++ b/examples/AOT/AOT_Tracing/src/AOT_Tracing/AOT_Tracing.csproj
@@ -18,8 +18,8 @@
-
-
+
+
\ No newline at end of file
diff --git a/examples/AOT/AOT_Tracing/test/AOT_Tracing.Tests/AOT_Tracing.Tests.csproj b/examples/AOT/AOT_Tracing/test/AOT_Tracing.Tests/AOT_Tracing.Tests.csproj
index 2bdc9557b..b62601e63 100644
--- a/examples/AOT/AOT_Tracing/test/AOT_Tracing.Tests/AOT_Tracing.Tests.csproj
+++ b/examples/AOT/AOT_Tracing/test/AOT_Tracing.Tests/AOT_Tracing.Tests.csproj
@@ -6,7 +6,7 @@
true
-
+
diff --git a/examples/BatchProcessing/src/HelloWorld/HelloWorld.csproj b/examples/BatchProcessing/src/HelloWorld/HelloWorld.csproj
index 01b0ecf92..7d3263e25 100644
--- a/examples/BatchProcessing/src/HelloWorld/HelloWorld.csproj
+++ b/examples/BatchProcessing/src/HelloWorld/HelloWorld.csproj
@@ -5,10 +5,10 @@
enable
-
-
+
+
-
+
diff --git a/examples/BatchProcessing/test/HelloWorld.Test/HelloWorld.Tests.csproj b/examples/BatchProcessing/test/HelloWorld.Test/HelloWorld.Tests.csproj
index 903aee7db..3990c0112 100644
--- a/examples/BatchProcessing/test/HelloWorld.Test/HelloWorld.Tests.csproj
+++ b/examples/BatchProcessing/test/HelloWorld.Test/HelloWorld.Tests.csproj
@@ -3,12 +3,12 @@
net8.0
-
+
-
+
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/.gitignore b/examples/Event Handler/BedrockAgentFunction/infra/.gitignore
new file mode 100644
index 000000000..f60797b6a
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/.gitignore
@@ -0,0 +1,8 @@
+*.js
+!jest.config.js
+*.d.ts
+node_modules
+
+# CDK asset staging directory
+.cdk.staging
+cdk.out
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/.npmignore b/examples/Event Handler/BedrockAgentFunction/infra/.npmignore
new file mode 100644
index 000000000..c1d6d45dc
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/.npmignore
@@ -0,0 +1,6 @@
+*.ts
+!*.d.ts
+
+# CDK asset staging directory
+.cdk.staging
+cdk.out
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/README.md b/examples/Event Handler/BedrockAgentFunction/infra/README.md
new file mode 100644
index 000000000..9315fe5b9
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/README.md
@@ -0,0 +1,14 @@
+# Welcome to your CDK TypeScript project
+
+This is a blank project for CDK development with TypeScript.
+
+The `cdk.json` file tells the CDK Toolkit how to execute your app.
+
+## Useful commands
+
+* `npm run build` compile typescript to js
+* `npm run watch` watch for changes and compile
+* `npm run test` perform the jest unit tests
+* `npx cdk deploy` deploy this stack to your default AWS account/region
+* `npx cdk diff` compare deployed stack with current state
+* `npx cdk synth` emits the synthesized CloudFormation template
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/cdk.json b/examples/Event Handler/BedrockAgentFunction/infra/cdk.json
new file mode 100644
index 000000000..eea31fee9
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/cdk.json
@@ -0,0 +1,96 @@
+{
+ "app": "npx ts-node --prefer-ts-exts bin/infra.ts",
+ "watch": {
+ "include": [
+ "**"
+ ],
+ "exclude": [
+ "README.md",
+ "cdk*.json",
+ "**/*.d.ts",
+ "**/*.js",
+ "tsconfig.json",
+ "package*.json",
+ "yarn.lock",
+ "node_modules",
+ "test"
+ ]
+ },
+ "context": {
+ "@aws-cdk/aws-lambda:recognizeLayerVersion": true,
+ "@aws-cdk/core:checkSecretUsage": true,
+ "@aws-cdk/core:target-partitions": [
+ "aws",
+ "aws-cn"
+ ],
+ "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
+ "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
+ "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true,
+ "@aws-cdk/aws-iam:minimizePolicies": true,
+ "@aws-cdk/core:validateSnapshotRemovalPolicy": true,
+ "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true,
+ "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true,
+ "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true,
+ "@aws-cdk/aws-apigateway:disableCloudWatchRole": true,
+ "@aws-cdk/core:enablePartitionLiterals": true,
+ "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true,
+ "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true,
+ "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true,
+ "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true,
+ "@aws-cdk/aws-route53-patters:useCertificate": true,
+ "@aws-cdk/customresources:installLatestAwsSdkDefault": false,
+ "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true,
+ "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true,
+ "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true,
+ "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true,
+ "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true,
+ "@aws-cdk/aws-redshift:columnId": true,
+ "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true,
+ "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true,
+ "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true,
+ "@aws-cdk/aws-kms:aliasNameRef": true,
+ "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true,
+ "@aws-cdk/core:includePrefixInUniqueNameGeneration": true,
+ "@aws-cdk/aws-efs:denyAnonymousAccess": true,
+ "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true,
+ "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true,
+ "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true,
+ "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true,
+ "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true,
+ "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true,
+ "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true,
+ "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true,
+ "@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true,
+ "@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true,
+ "@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true,
+ "@aws-cdk/aws-eks:nodegroupNameAttribute": true,
+ "@aws-cdk/aws-ec2:ebsDefaultGp3Volume": true,
+ "@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true,
+ "@aws-cdk/custom-resources:logApiResponseDataPropertyTrueDefault": false,
+ "@aws-cdk/aws-s3:keepNotificationInImportedBucket": false,
+ "@aws-cdk/aws-ecs:enableImdsBlockingDeprecatedFeature": false,
+ "@aws-cdk/aws-ecs:disableEcsImdsBlocking": true,
+ "@aws-cdk/aws-ecs:reduceEc2FargateCloudWatchPermissions": true,
+ "@aws-cdk/aws-dynamodb:resourcePolicyPerReplica": true,
+ "@aws-cdk/aws-ec2:ec2SumTImeoutEnabled": true,
+ "@aws-cdk/aws-appsync:appSyncGraphQLAPIScopeLambdaPermission": true,
+ "@aws-cdk/aws-rds:setCorrectValueForDatabaseInstanceReadReplicaInstanceResourceId": true,
+ "@aws-cdk/core:cfnIncludeRejectComplexResourceUpdateCreatePolicyIntrinsics": true,
+ "@aws-cdk/aws-lambda-nodejs:sdkV3ExcludeSmithyPackages": true,
+ "@aws-cdk/aws-stepfunctions-tasks:fixRunEcsTaskPolicy": true,
+ "@aws-cdk/aws-ec2:bastionHostUseAmazonLinux2023ByDefault": true,
+ "@aws-cdk/aws-route53-targets:userPoolDomainNameMethodWithoutCustomResource": true,
+ "@aws-cdk/aws-elasticloadbalancingV2:albDualstackWithoutPublicIpv4SecurityGroupRulesDefault": true,
+ "@aws-cdk/aws-iam:oidcRejectUnauthorizedConnections": true,
+ "@aws-cdk/core:enableAdditionalMetadataCollection": true,
+ "@aws-cdk/aws-lambda:createNewPoliciesWithAddToRolePolicy": false,
+ "@aws-cdk/aws-s3:setUniqueReplicationRoleName": true,
+ "@aws-cdk/aws-events:requireEventBusPolicySid": true,
+ "@aws-cdk/core:aspectPrioritiesMutating": true,
+ "@aws-cdk/aws-dynamodb:retainTableReplica": true,
+ "@aws-cdk/aws-stepfunctions:useDistributedMapResultWriterV2": true,
+ "@aws-cdk/s3-notifications:addS3TrustKeyPolicyForSnsSubscriptions": true,
+ "@aws-cdk/aws-ec2:requirePrivateSubnetsForEgressOnlyInternetGateway": true,
+ "@aws-cdk/aws-s3:publicAccessBlockedByDefault": true
+ }
+}
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/jest.config.js b/examples/Event Handler/BedrockAgentFunction/infra/jest.config.js
new file mode 100644
index 000000000..08263b895
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/jest.config.js
@@ -0,0 +1,8 @@
+module.exports = {
+ testEnvironment: 'node',
+ roots: ['/test'],
+ testMatch: ['**/*.test.ts'],
+ transform: {
+ '^.+\\.tsx?$': 'ts-jest'
+ }
+};
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/lib/bedrockagents-stack.ts b/examples/Event Handler/BedrockAgentFunction/infra/lib/bedrockagents-stack.ts
new file mode 100644
index 000000000..001d9912d
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/lib/bedrockagents-stack.ts
@@ -0,0 +1,121 @@
+import {
+ Stack,
+ type StackProps,
+ CfnOutput,
+ RemovalPolicy,
+ Arn,
+ Duration,
+} from 'aws-cdk-lib';
+import type { Construct } from 'constructs';
+import { Runtime, Function as LambdaFunction, Code, Architecture } from 'aws-cdk-lib/aws-lambda';
+import { LogGroup, RetentionDays } from 'aws-cdk-lib/aws-logs';
+import { CfnAgent } from 'aws-cdk-lib/aws-bedrock';
+import {
+ PolicyDocument,
+ PolicyStatement,
+ Role,
+ ServicePrincipal,
+} from 'aws-cdk-lib/aws-iam';
+
+export class BedrockAgentsStack extends Stack {
+ constructor(scope: Construct, id: string, props?: StackProps) {
+ super(scope, id, props);
+
+ const fnName = 'BedrockAgentsFn';
+ const logGroup = new LogGroup(this, 'MyLogGroup', {
+ logGroupName: `/aws/lambda/${fnName}`,
+ removalPolicy: RemovalPolicy.DESTROY,
+ retention: RetentionDays.ONE_DAY,
+ });
+
+ const fn = new LambdaFunction(this, 'MyFunction', {
+ functionName: fnName,
+ logGroup,
+ timeout: Duration.minutes(3),
+ runtime: Runtime.DOTNET_8,
+ handler: 'BedrockAgentFunction',
+ code: Code.fromAsset('../release/BedrockAgentFunction.zip'),
+ architecture: Architecture.X86_64,
+ });
+
+ const agentRole = new Role(this, 'MyAgentRole', {
+ assumedBy: new ServicePrincipal('bedrock.amazonaws.com'),
+ description: 'Role for Bedrock airport agent',
+ inlinePolicies: {
+ bedrock: new PolicyDocument({
+ statements: [
+ new PolicyStatement({
+ actions: [
+ 'bedrock:*',
+ ],
+ resources: [
+ Arn.format(
+ {
+ service: 'bedrock',
+ resource: 'foundation-model/*',
+ region: 'us-*',
+ account: '',
+ },
+ Stack.of(this)
+ ),
+ Arn.format(
+ {
+ service: 'bedrock',
+ resource: 'inference-profile/*',
+ region: 'us-*',
+ account: '*',
+ },
+ Stack.of(this)
+ ),
+ ],
+ }),
+ ],
+ }),
+ },
+ });
+
+ const agent = new CfnAgent(this, 'MyCfnAgent', {
+ agentName: 'airportAgent',
+ actionGroups: [
+ {
+ actionGroupName: 'airportActionGroup',
+ actionGroupExecutor: {
+ lambda: fn.functionArn,
+ },
+ functionSchema: {
+ functions: [
+ {
+ name: 'getAirportCodeForCity',
+ description: 'Get airport code and full airport name for a specific city',
+ parameters: {
+ city: {
+ type: 'string',
+ description: 'The name of the city to get the airport code for',
+ required: true,
+ },
+ },
+ },
+ ],
+ },
+ },
+ ],
+ agentResourceRoleArn: agentRole.roleArn,
+ autoPrepare: true,
+ description: 'A simple airport agent',
+ foundationModel: `arn:aws:bedrock:us-west-2:${Stack.of(this).account}:inference-profile/us.amazon.nova-pro-v1:0`,
+ instruction:
+ 'You are an airport traffic control agent. You will be given a city name and you will return the airport code and airport full name for that city.',
+ });
+
+ fn.addPermission('BedrockAgentInvokePermission', {
+ principal: new ServicePrincipal('bedrock.amazonaws.com'),
+ action: 'lambda:InvokeFunction',
+ sourceAccount: this.account,
+ sourceArn: `arn:aws:bedrock:${this.region}:${this.account}:agent/${agent.attrAgentId}`,
+ });
+
+ new CfnOutput(this, 'FunctionArn', {
+ value: fn.functionArn,
+ });
+ }
+}
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/package-lock.json b/examples/Event Handler/BedrockAgentFunction/infra/package-lock.json
new file mode 100644
index 000000000..cb3ffa662
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/package-lock.json
@@ -0,0 +1,4448 @@
+{
+ "name": "infra",
+ "version": "0.1.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "infra",
+ "version": "0.1.0",
+ "dependencies": {
+ "aws-cdk-lib": "2.198.0",
+ "constructs": "^10.0.0"
+ },
+ "bin": {
+ "infra": "bin/infra.js"
+ },
+ "devDependencies": {
+ "@types/jest": "^29.5.14",
+ "@types/node": "22.7.9",
+ "aws-cdk": "2.1017.1",
+ "jest": "^29.7.0",
+ "ts-jest": "^29.2.5",
+ "ts-node": "^10.9.2",
+ "typescript": "~5.6.3"
+ }
+ },
+ "node_modules/@ampproject/remapping": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
+ "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@jridgewell/gen-mapping": "^0.3.5",
+ "@jridgewell/trace-mapping": "^0.3.24"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@aws-cdk/asset-awscli-v1": {
+ "version": "2.2.237",
+ "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.237.tgz",
+ "integrity": "sha512-OlXylbXI52lboFVJBFLae+WB99qWmI121x/wXQHEMj2RaVNVbWE+OAHcDk2Um1BitUQCaTf9ki57B0Fuqx0Rvw==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@aws-cdk/asset-node-proxy-agent-v6": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz",
+ "integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@aws-cdk/cloud-assembly-schema": {
+ "version": "41.2.0",
+ "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-41.2.0.tgz",
+ "integrity": "sha512-JaulVS6z9y5+u4jNmoWbHZRs9uGOnmn/ktXygNWKNu1k6lF3ad4so3s18eRu15XCbUIomxN9WPYT6Ehh7hzONw==",
+ "bundleDependencies": [
+ "jsonschema",
+ "semver"
+ ],
+ "license": "Apache-2.0",
+ "dependencies": {
+ "jsonschema": "~1.4.1",
+ "semver": "^7.7.1"
+ },
+ "engines": {
+ "node": ">= 14.15.0"
+ }
+ },
+ "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": {
+ "version": "1.4.1",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": {
+ "version": "7.7.1",
+ "inBundle": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/@babel/code-frame": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
+ "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-validator-identifier": "^7.27.1",
+ "js-tokens": "^4.0.0",
+ "picocolors": "^1.1.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/compat-data": {
+ "version": "7.27.3",
+ "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.27.3.tgz",
+ "integrity": "sha512-V42wFfx1ymFte+ecf6iXghnnP8kWTO+ZLXIyZq+1LAXHHvTZdVxicn4yiVYdYMGaCO3tmqub11AorKkv+iodqw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/core": {
+ "version": "7.27.4",
+ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.27.4.tgz",
+ "integrity": "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@ampproject/remapping": "^2.2.0",
+ "@babel/code-frame": "^7.27.1",
+ "@babel/generator": "^7.27.3",
+ "@babel/helper-compilation-targets": "^7.27.2",
+ "@babel/helper-module-transforms": "^7.27.3",
+ "@babel/helpers": "^7.27.4",
+ "@babel/parser": "^7.27.4",
+ "@babel/template": "^7.27.2",
+ "@babel/traverse": "^7.27.4",
+ "@babel/types": "^7.27.3",
+ "convert-source-map": "^2.0.0",
+ "debug": "^4.1.0",
+ "gensync": "^1.0.0-beta.2",
+ "json5": "^2.2.3",
+ "semver": "^6.3.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/babel"
+ }
+ },
+ "node_modules/@babel/generator": {
+ "version": "7.27.3",
+ "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.3.tgz",
+ "integrity": "sha512-xnlJYj5zepml8NXtjkG0WquFUv8RskFqyFcVgTBp5k+NaA/8uw/K+OSVf8AMGw5e9HKP2ETd5xpK5MLZQD6b4Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.27.3",
+ "@babel/types": "^7.27.3",
+ "@jridgewell/gen-mapping": "^0.3.5",
+ "@jridgewell/trace-mapping": "^0.3.25",
+ "jsesc": "^3.0.2"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-compilation-targets": {
+ "version": "7.27.2",
+ "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
+ "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/compat-data": "^7.27.2",
+ "@babel/helper-validator-option": "^7.27.1",
+ "browserslist": "^4.24.0",
+ "lru-cache": "^5.1.1",
+ "semver": "^6.3.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-module-imports": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
+ "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/traverse": "^7.27.1",
+ "@babel/types": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-module-transforms": {
+ "version": "7.27.3",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz",
+ "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-module-imports": "^7.27.1",
+ "@babel/helper-validator-identifier": "^7.27.1",
+ "@babel/traverse": "^7.27.3"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0"
+ }
+ },
+ "node_modules/@babel/helper-plugin-utils": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz",
+ "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-string-parser": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
+ "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-validator-identifier": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
+ "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-validator-option": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
+ "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helpers": {
+ "version": "7.27.4",
+ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.4.tgz",
+ "integrity": "sha512-Y+bO6U+I7ZKaM5G5rDUZiYfUvQPUibYmAFe7EnKdnKBbVXDZxvp+MWOH5gYciY0EPk4EScsuFMQBbEfpdRKSCQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/template": "^7.27.2",
+ "@babel/types": "^7.27.3"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/parser": {
+ "version": "7.27.4",
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.4.tgz",
+ "integrity": "sha512-BRmLHGwpUqLFR2jzx9orBuX/ABDkj2jLKOXrHDTN2aOKL+jFDDKaRNo9nyYsIl9h/UE/7lMKdDjKQQyxKKDZ7g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.27.3"
+ },
+ "bin": {
+ "parser": "bin/babel-parser.js"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-async-generators": {
+ "version": "7.8.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz",
+ "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-bigint": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz",
+ "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-class-properties": {
+ "version": "7.12.13",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz",
+ "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.12.13"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-class-static-block": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz",
+ "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.14.5"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-import-attributes": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz",
+ "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-import-meta": {
+ "version": "7.10.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz",
+ "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.10.4"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-json-strings": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz",
+ "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-jsx": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz",
+ "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-logical-assignment-operators": {
+ "version": "7.10.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz",
+ "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.10.4"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz",
+ "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-numeric-separator": {
+ "version": "7.10.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz",
+ "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.10.4"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-object-rest-spread": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz",
+ "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-optional-catch-binding": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz",
+ "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-optional-chaining": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz",
+ "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.8.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-private-property-in-object": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz",
+ "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.14.5"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-top-level-await": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz",
+ "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.14.5"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-syntax-typescript": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz",
+ "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/template": {
+ "version": "7.27.2",
+ "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
+ "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.27.1",
+ "@babel/parser": "^7.27.2",
+ "@babel/types": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/traverse": {
+ "version": "7.27.4",
+ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.4.tgz",
+ "integrity": "sha512-oNcu2QbHqts9BtOWJosOVJapWjBDSxGCpFvikNR5TGDYDQf3JwpIoMzIKrvfoti93cLfPJEG4tH9SPVeyCGgdA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.27.1",
+ "@babel/generator": "^7.27.3",
+ "@babel/parser": "^7.27.4",
+ "@babel/template": "^7.27.2",
+ "@babel/types": "^7.27.3",
+ "debug": "^4.3.1",
+ "globals": "^11.1.0"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/types": {
+ "version": "7.27.3",
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.3.tgz",
+ "integrity": "sha512-Y1GkI4ktrtvmawoSq+4FCVHNryea6uR+qUQy0AGxLSsjCX0nVmkYQMBLHDkXZuo5hGx7eYdnIaslsdBFm7zbUw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-string-parser": "^7.27.1",
+ "@babel/helper-validator-identifier": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@bcoe/v8-coverage": {
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz",
+ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@cspotcode/source-map-support": {
+ "version": "0.8.1",
+ "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
+ "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/trace-mapping": "0.3.9"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": {
+ "version": "0.3.9",
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
+ "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/resolve-uri": "^3.0.3",
+ "@jridgewell/sourcemap-codec": "^1.4.10"
+ }
+ },
+ "node_modules/@istanbuljs/load-nyc-config": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
+ "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "camelcase": "^5.3.1",
+ "find-up": "^4.1.0",
+ "get-package-type": "^0.1.0",
+ "js-yaml": "^3.13.1",
+ "resolve-from": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/@istanbuljs/schema": {
+ "version": "0.1.3",
+ "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
+ "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/@jest/console": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz",
+ "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "chalk": "^4.0.0",
+ "jest-message-util": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "slash": "^3.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/core": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz",
+ "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/console": "^29.7.0",
+ "@jest/reporters": "^29.7.0",
+ "@jest/test-result": "^29.7.0",
+ "@jest/transform": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "ansi-escapes": "^4.2.1",
+ "chalk": "^4.0.0",
+ "ci-info": "^3.2.0",
+ "exit": "^0.1.2",
+ "graceful-fs": "^4.2.9",
+ "jest-changed-files": "^29.7.0",
+ "jest-config": "^29.7.0",
+ "jest-haste-map": "^29.7.0",
+ "jest-message-util": "^29.7.0",
+ "jest-regex-util": "^29.6.3",
+ "jest-resolve": "^29.7.0",
+ "jest-resolve-dependencies": "^29.7.0",
+ "jest-runner": "^29.7.0",
+ "jest-runtime": "^29.7.0",
+ "jest-snapshot": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "jest-validate": "^29.7.0",
+ "jest-watcher": "^29.7.0",
+ "micromatch": "^4.0.4",
+ "pretty-format": "^29.7.0",
+ "slash": "^3.0.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0"
+ },
+ "peerDependenciesMeta": {
+ "node-notifier": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@jest/environment": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz",
+ "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/fake-timers": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "jest-mock": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/expect": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz",
+ "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "expect": "^29.7.0",
+ "jest-snapshot": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/expect-utils": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz",
+ "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "jest-get-type": "^29.6.3"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/fake-timers": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz",
+ "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "@sinonjs/fake-timers": "^10.0.2",
+ "@types/node": "*",
+ "jest-message-util": "^29.7.0",
+ "jest-mock": "^29.7.0",
+ "jest-util": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/globals": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz",
+ "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/environment": "^29.7.0",
+ "@jest/expect": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "jest-mock": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/reporters": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz",
+ "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@bcoe/v8-coverage": "^0.2.3",
+ "@jest/console": "^29.7.0",
+ "@jest/test-result": "^29.7.0",
+ "@jest/transform": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@jridgewell/trace-mapping": "^0.3.18",
+ "@types/node": "*",
+ "chalk": "^4.0.0",
+ "collect-v8-coverage": "^1.0.0",
+ "exit": "^0.1.2",
+ "glob": "^7.1.3",
+ "graceful-fs": "^4.2.9",
+ "istanbul-lib-coverage": "^3.0.0",
+ "istanbul-lib-instrument": "^6.0.0",
+ "istanbul-lib-report": "^3.0.0",
+ "istanbul-lib-source-maps": "^4.0.0",
+ "istanbul-reports": "^3.1.3",
+ "jest-message-util": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "jest-worker": "^29.7.0",
+ "slash": "^3.0.0",
+ "string-length": "^4.0.1",
+ "strip-ansi": "^6.0.0",
+ "v8-to-istanbul": "^9.0.1"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0"
+ },
+ "peerDependenciesMeta": {
+ "node-notifier": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@jest/schemas": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
+ "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@sinclair/typebox": "^0.27.8"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/source-map": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz",
+ "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/trace-mapping": "^0.3.18",
+ "callsites": "^3.0.0",
+ "graceful-fs": "^4.2.9"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/test-result": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz",
+ "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/console": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/istanbul-lib-coverage": "^2.0.0",
+ "collect-v8-coverage": "^1.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/test-sequencer": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz",
+ "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/test-result": "^29.7.0",
+ "graceful-fs": "^4.2.9",
+ "jest-haste-map": "^29.7.0",
+ "slash": "^3.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/transform": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz",
+ "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/core": "^7.11.6",
+ "@jest/types": "^29.6.3",
+ "@jridgewell/trace-mapping": "^0.3.18",
+ "babel-plugin-istanbul": "^6.1.1",
+ "chalk": "^4.0.0",
+ "convert-source-map": "^2.0.0",
+ "fast-json-stable-stringify": "^2.1.0",
+ "graceful-fs": "^4.2.9",
+ "jest-haste-map": "^29.7.0",
+ "jest-regex-util": "^29.6.3",
+ "jest-util": "^29.7.0",
+ "micromatch": "^4.0.4",
+ "pirates": "^4.0.4",
+ "slash": "^3.0.0",
+ "write-file-atomic": "^4.0.2"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jest/types": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz",
+ "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/schemas": "^29.6.3",
+ "@types/istanbul-lib-coverage": "^2.0.0",
+ "@types/istanbul-reports": "^3.0.0",
+ "@types/node": "*",
+ "@types/yargs": "^17.0.8",
+ "chalk": "^4.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@jridgewell/gen-mapping": {
+ "version": "0.3.8",
+ "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz",
+ "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/set-array": "^1.2.1",
+ "@jridgewell/sourcemap-codec": "^1.4.10",
+ "@jridgewell/trace-mapping": "^0.3.24"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/resolve-uri": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
+ "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/set-array": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
+ "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/sourcemap-codec": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
+ "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@jridgewell/trace-mapping": {
+ "version": "0.3.25",
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
+ "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/resolve-uri": "^3.1.0",
+ "@jridgewell/sourcemap-codec": "^1.4.14"
+ }
+ },
+ "node_modules/@sinclair/typebox": {
+ "version": "0.27.8",
+ "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz",
+ "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@sinonjs/commons": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz",
+ "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "type-detect": "4.0.8"
+ }
+ },
+ "node_modules/@sinonjs/fake-timers": {
+ "version": "10.3.0",
+ "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz",
+ "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "@sinonjs/commons": "^3.0.0"
+ }
+ },
+ "node_modules/@tsconfig/node10": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
+ "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@tsconfig/node12": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
+ "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@tsconfig/node14": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
+ "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@tsconfig/node16": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
+ "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/babel__core": {
+ "version": "7.20.5",
+ "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz",
+ "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.20.7",
+ "@babel/types": "^7.20.7",
+ "@types/babel__generator": "*",
+ "@types/babel__template": "*",
+ "@types/babel__traverse": "*"
+ }
+ },
+ "node_modules/@types/babel__generator": {
+ "version": "7.27.0",
+ "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz",
+ "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.0.0"
+ }
+ },
+ "node_modules/@types/babel__template": {
+ "version": "7.4.4",
+ "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz",
+ "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.1.0",
+ "@babel/types": "^7.0.0"
+ }
+ },
+ "node_modules/@types/babel__traverse": {
+ "version": "7.20.7",
+ "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz",
+ "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.20.7"
+ }
+ },
+ "node_modules/@types/graceful-fs": {
+ "version": "4.1.9",
+ "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz",
+ "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
+ "node_modules/@types/istanbul-lib-coverage": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz",
+ "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/istanbul-lib-report": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz",
+ "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/istanbul-lib-coverage": "*"
+ }
+ },
+ "node_modules/@types/istanbul-reports": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz",
+ "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/istanbul-lib-report": "*"
+ }
+ },
+ "node_modules/@types/jest": {
+ "version": "29.5.14",
+ "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz",
+ "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "expect": "^29.0.0",
+ "pretty-format": "^29.0.0"
+ }
+ },
+ "node_modules/@types/node": {
+ "version": "22.7.9",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.9.tgz",
+ "integrity": "sha512-jrTfRC7FM6nChvU7X2KqcrgquofrWLFDeYC1hKfwNWomVvrn7JIksqf344WN2X/y8xrgqBd2dJATZV4GbatBfg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "undici-types": "~6.19.2"
+ }
+ },
+ "node_modules/@types/stack-utils": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz",
+ "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/yargs": {
+ "version": "17.0.33",
+ "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz",
+ "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/yargs-parser": "*"
+ }
+ },
+ "node_modules/@types/yargs-parser": {
+ "version": "21.0.3",
+ "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
+ "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/acorn": {
+ "version": "8.14.1",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz",
+ "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "acorn": "bin/acorn"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/acorn-walk": {
+ "version": "8.3.4",
+ "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
+ "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "acorn": "^8.11.0"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/ansi-escapes": {
+ "version": "4.3.2",
+ "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz",
+ "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "type-fest": "^0.21.3"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/anymatch": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
+ "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "normalize-path": "^3.0.0",
+ "picomatch": "^2.0.4"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/arg": {
+ "version": "4.1.3",
+ "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
+ "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/argparse": {
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+ "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "sprintf-js": "~1.0.2"
+ }
+ },
+ "node_modules/async": {
+ "version": "3.2.6",
+ "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
+ "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk": {
+ "version": "2.1017.1",
+ "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1017.1.tgz",
+ "integrity": "sha512-KtDdkMhfVjDeexjpMrVoSlz2mTYI5BE/KotvJ7iFbZy1G0nkpW1ImZ54TdBefeeFmZ+8DAjU3I6nUFtymyOI1A==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "bin": {
+ "cdk": "bin/cdk"
+ },
+ "engines": {
+ "node": ">= 14.15.0"
+ },
+ "optionalDependencies": {
+ "fsevents": "2.3.2"
+ }
+ },
+ "node_modules/aws-cdk-lib": {
+ "version": "2.198.0",
+ "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.198.0.tgz",
+ "integrity": "sha512-CyZ+lnRsCsLskzQLPO0EiGl5EVcLluhfa67df3b8/gJfsm+91SHJa75OH+ymdGtUp5Vn/MWUPsujw0EhWMfsIQ==",
+ "bundleDependencies": [
+ "@balena/dockerignore",
+ "case",
+ "fs-extra",
+ "ignore",
+ "jsonschema",
+ "minimatch",
+ "punycode",
+ "semver",
+ "table",
+ "yaml",
+ "mime-types"
+ ],
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@aws-cdk/asset-awscli-v1": "2.2.237",
+ "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0",
+ "@aws-cdk/cloud-assembly-schema": "^41.2.0",
+ "@balena/dockerignore": "^1.0.2",
+ "case": "1.6.3",
+ "fs-extra": "^11.3.0",
+ "ignore": "^5.3.2",
+ "jsonschema": "^1.5.0",
+ "mime-types": "^2.1.35",
+ "minimatch": "^3.1.2",
+ "punycode": "^2.3.1",
+ "semver": "^7.7.2",
+ "table": "^6.9.0",
+ "yaml": "1.10.2"
+ },
+ "engines": {
+ "node": ">= 14.15.0"
+ },
+ "peerDependencies": {
+ "constructs": "^10.0.0"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/@balena/dockerignore": {
+ "version": "1.0.2",
+ "inBundle": true,
+ "license": "Apache-2.0"
+ },
+ "node_modules/aws-cdk-lib/node_modules/ajv": {
+ "version": "8.17.1",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "fast-deep-equal": "^3.1.3",
+ "fast-uri": "^3.0.1",
+ "json-schema-traverse": "^1.0.0",
+ "require-from-string": "^2.0.2"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/epoberezkin"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/astral-regex": {
+ "version": "2.0.0",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/brace-expansion": {
+ "version": "1.1.11",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/case": {
+ "version": "1.6.3",
+ "inBundle": true,
+ "license": "(MIT OR GPL-3.0-or-later)",
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/color-convert": {
+ "version": "2.0.1",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/color-name": {
+ "version": "1.1.4",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/concat-map": {
+ "version": "0.0.1",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/fast-deep-equal": {
+ "version": "3.1.3",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/fast-uri": {
+ "version": "3.0.6",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/fastify"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/fastify"
+ }
+ ],
+ "inBundle": true,
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/aws-cdk-lib/node_modules/fs-extra": {
+ "version": "11.3.0",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "graceful-fs": "^4.2.0",
+ "jsonfile": "^6.0.1",
+ "universalify": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=14.14"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/graceful-fs": {
+ "version": "4.2.11",
+ "inBundle": true,
+ "license": "ISC"
+ },
+ "node_modules/aws-cdk-lib/node_modules/ignore": {
+ "version": "5.3.2",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 4"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/json-schema-traverse": {
+ "version": "1.0.0",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/jsonfile": {
+ "version": "6.1.0",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "universalify": "^2.0.0"
+ },
+ "optionalDependencies": {
+ "graceful-fs": "^4.1.6"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/jsonschema": {
+ "version": "1.5.0",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/lodash.truncate": {
+ "version": "4.4.2",
+ "inBundle": true,
+ "license": "MIT"
+ },
+ "node_modules/aws-cdk-lib/node_modules/mime-db": {
+ "version": "1.52.0",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/mime-types": {
+ "version": "2.1.35",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/minimatch": {
+ "version": "3.1.2",
+ "inBundle": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/punycode": {
+ "version": "2.3.1",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/require-from-string": {
+ "version": "2.0.2",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/semver": {
+ "version": "7.7.2",
+ "inBundle": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/slice-ansi": {
+ "version": "4.0.0",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "astral-regex": "^2.0.0",
+ "is-fullwidth-code-point": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/slice-ansi?sponsor=1"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/string-width": {
+ "version": "4.2.3",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "inBundle": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/table": {
+ "version": "6.9.0",
+ "inBundle": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "ajv": "^8.0.1",
+ "lodash.truncate": "^4.4.2",
+ "slice-ansi": "^4.0.0",
+ "string-width": "^4.2.3",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=10.0.0"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/universalify": {
+ "version": "2.0.1",
+ "inBundle": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 10.0.0"
+ }
+ },
+ "node_modules/aws-cdk-lib/node_modules/yaml": {
+ "version": "1.10.2",
+ "inBundle": true,
+ "license": "ISC",
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/babel-jest": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz",
+ "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/transform": "^29.7.0",
+ "@types/babel__core": "^7.1.14",
+ "babel-plugin-istanbul": "^6.1.1",
+ "babel-preset-jest": "^29.6.3",
+ "chalk": "^4.0.0",
+ "graceful-fs": "^4.2.9",
+ "slash": "^3.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.8.0"
+ }
+ },
+ "node_modules/babel-plugin-istanbul": {
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz",
+ "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.0.0",
+ "@istanbuljs/load-nyc-config": "^1.0.0",
+ "@istanbuljs/schema": "^0.1.2",
+ "istanbul-lib-instrument": "^5.0.4",
+ "test-exclude": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz",
+ "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "@babel/core": "^7.12.3",
+ "@babel/parser": "^7.14.7",
+ "@istanbuljs/schema": "^0.1.2",
+ "istanbul-lib-coverage": "^3.2.0",
+ "semver": "^6.3.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/babel-plugin-jest-hoist": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz",
+ "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/template": "^7.3.3",
+ "@babel/types": "^7.3.3",
+ "@types/babel__core": "^7.1.14",
+ "@types/babel__traverse": "^7.0.6"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/babel-preset-current-node-syntax": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz",
+ "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/plugin-syntax-async-generators": "^7.8.4",
+ "@babel/plugin-syntax-bigint": "^7.8.3",
+ "@babel/plugin-syntax-class-properties": "^7.12.13",
+ "@babel/plugin-syntax-class-static-block": "^7.14.5",
+ "@babel/plugin-syntax-import-attributes": "^7.24.7",
+ "@babel/plugin-syntax-import-meta": "^7.10.4",
+ "@babel/plugin-syntax-json-strings": "^7.8.3",
+ "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4",
+ "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3",
+ "@babel/plugin-syntax-numeric-separator": "^7.10.4",
+ "@babel/plugin-syntax-object-rest-spread": "^7.8.3",
+ "@babel/plugin-syntax-optional-catch-binding": "^7.8.3",
+ "@babel/plugin-syntax-optional-chaining": "^7.8.3",
+ "@babel/plugin-syntax-private-property-in-object": "^7.14.5",
+ "@babel/plugin-syntax-top-level-await": "^7.14.5"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0"
+ }
+ },
+ "node_modules/babel-preset-jest": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz",
+ "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "babel-plugin-jest-hoist": "^29.6.3",
+ "babel-preset-current-node-syntax": "^1.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0"
+ }
+ },
+ "node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "license": "MIT"
+ },
+ "node_modules/brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/braces": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+ "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fill-range": "^7.1.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/browserslist": {
+ "version": "4.25.0",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.0.tgz",
+ "integrity": "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "caniuse-lite": "^1.0.30001718",
+ "electron-to-chromium": "^1.5.160",
+ "node-releases": "^2.0.19",
+ "update-browserslist-db": "^1.1.3"
+ },
+ "bin": {
+ "browserslist": "cli.js"
+ },
+ "engines": {
+ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
+ }
+ },
+ "node_modules/bs-logger": {
+ "version": "0.2.6",
+ "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz",
+ "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fast-json-stable-stringify": "2.x"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/bser": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz",
+ "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "node-int64": "^0.4.0"
+ }
+ },
+ "node_modules/buffer-from": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
+ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/callsites": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+ "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/camelcase": {
+ "version": "5.3.1",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+ "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/caniuse-lite": {
+ "version": "1.0.30001720",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001720.tgz",
+ "integrity": "sha512-Ec/2yV2nNPwb4DnTANEV99ZWwm3ZWfdlfkQbWSDDt+PsXEVYwlhPH8tdMaPunYTKKmz7AnHi2oNEi1GcmKCD8g==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "CC-BY-4.0"
+ },
+ "node_modules/chalk": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+ "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/chalk?sponsor=1"
+ }
+ },
+ "node_modules/char-regex": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz",
+ "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/ci-info": {
+ "version": "3.9.0",
+ "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz",
+ "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/sibiraj-s"
+ }
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/cjs-module-lexer": {
+ "version": "1.4.3",
+ "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz",
+ "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/cliui": {
+ "version": "8.0.1",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
+ "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.1",
+ "wrap-ansi": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/co": {
+ "version": "4.6.0",
+ "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz",
+ "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "iojs": ">= 1.0.0",
+ "node": ">= 0.12.0"
+ }
+ },
+ "node_modules/collect-v8-coverage": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz",
+ "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
+ "license": "MIT"
+ },
+ "node_modules/constructs": {
+ "version": "10.4.2",
+ "resolved": "https://registry.npmjs.org/constructs/-/constructs-10.4.2.tgz",
+ "integrity": "sha512-wsNxBlAott2qg8Zv87q3eYZYgheb9lchtBfjHzzLHtXbttwSrHPs1NNQbBrmbb1YZvYg2+Vh0Dor76w4mFxJkA==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/convert-source-map": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
+ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/create-jest": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz",
+ "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "chalk": "^4.0.0",
+ "exit": "^0.1.2",
+ "graceful-fs": "^4.2.9",
+ "jest-config": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "prompts": "^2.0.1"
+ },
+ "bin": {
+ "create-jest": "bin/create-jest.js"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/create-require": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
+ "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/cross-spawn": {
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+ "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/debug": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
+ "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ms": "^2.1.3"
+ },
+ "engines": {
+ "node": ">=6.0"
+ },
+ "peerDependenciesMeta": {
+ "supports-color": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/dedent": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz",
+ "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==",
+ "dev": true,
+ "license": "MIT",
+ "peerDependencies": {
+ "babel-plugin-macros": "^3.1.0"
+ },
+ "peerDependenciesMeta": {
+ "babel-plugin-macros": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/deepmerge": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz",
+ "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/detect-newline": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz",
+ "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/diff": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+ "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=0.3.1"
+ }
+ },
+ "node_modules/diff-sequences": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz",
+ "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/ejs": {
+ "version": "3.1.10",
+ "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
+ "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "jake": "^10.8.5"
+ },
+ "bin": {
+ "ejs": "bin/cli.js"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/electron-to-chromium": {
+ "version": "1.5.161",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.161.tgz",
+ "integrity": "sha512-hwtetwfKNZo/UlwHIVBlKZVdy7o8bIZxxKs0Mv/ROPiQQQmDgdm5a+KvKtBsxM8ZjFzTaCeLoodZ8jiBE3o9rA==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/emittery": {
+ "version": "0.13.1",
+ "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz",
+ "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sindresorhus/emittery?sponsor=1"
+ }
+ },
+ "node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/error-ex": {
+ "version": "1.3.2",
+ "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
+ "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "is-arrayish": "^0.2.1"
+ }
+ },
+ "node_modules/escalade": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
+ "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/escape-string-regexp": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
+ "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/esprima": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+ "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+ "dev": true,
+ "license": "BSD-2-Clause",
+ "bin": {
+ "esparse": "bin/esparse.js",
+ "esvalidate": "bin/esvalidate.js"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/execa": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
+ "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "cross-spawn": "^7.0.3",
+ "get-stream": "^6.0.0",
+ "human-signals": "^2.1.0",
+ "is-stream": "^2.0.0",
+ "merge-stream": "^2.0.0",
+ "npm-run-path": "^4.0.1",
+ "onetime": "^5.1.2",
+ "signal-exit": "^3.0.3",
+ "strip-final-newline": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sindresorhus/execa?sponsor=1"
+ }
+ },
+ "node_modules/exit": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz",
+ "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==",
+ "dev": true,
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/expect": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz",
+ "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/expect-utils": "^29.7.0",
+ "jest-get-type": "^29.6.3",
+ "jest-matcher-utils": "^29.7.0",
+ "jest-message-util": "^29.7.0",
+ "jest-util": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/fast-json-stable-stringify": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/fb-watchman": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz",
+ "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "bser": "2.1.1"
+ }
+ },
+ "node_modules/filelist": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz",
+ "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "minimatch": "^5.0.1"
+ }
+ },
+ "node_modules/filelist/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/filelist/node_modules/minimatch": {
+ "version": "5.1.6",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
+ "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/fill-range": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+ "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "to-regex-range": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/find-up": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+ "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "locate-path": "^5.0.0",
+ "path-exists": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/fsevents": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
+ "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+ }
+ },
+ "node_modules/function-bind": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
+ "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/gensync": {
+ "version": "1.0.0-beta.2",
+ "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
+ "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": "6.* || 8.* || >= 10.*"
+ }
+ },
+ "node_modules/get-package-type": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
+ "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
+ "node_modules/get-stream": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
+ "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/glob": {
+ "version": "7.2.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+ "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+ "deprecated": "Glob versions prior to v9 are no longer supported",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.1.1",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/globals": {
+ "version": "11.12.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
+ "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/graceful-fs": {
+ "version": "4.2.11",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
+ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/hasown": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
+ "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/html-escaper": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
+ "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/human-signals": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
+ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=10.17.0"
+ }
+ },
+ "node_modules/import-local": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz",
+ "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "pkg-dir": "^4.2.0",
+ "resolve-cwd": "^3.0.0"
+ },
+ "bin": {
+ "import-local-fixture": "fixtures/cli.js"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/imurmurhash": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+ "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.8.19"
+ }
+ },
+ "node_modules/inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+ "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/is-arrayish": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
+ "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/is-core-module": {
+ "version": "2.16.1",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
+ "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "hasown": "^2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-generator-fn": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz",
+ "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.12.0"
+ }
+ },
+ "node_modules/is-stream": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
+ "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/istanbul-lib-coverage": {
+ "version": "3.2.2",
+ "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
+ "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/istanbul-lib-instrument": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz",
+ "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "@babel/core": "^7.23.9",
+ "@babel/parser": "^7.23.9",
+ "@istanbuljs/schema": "^0.1.3",
+ "istanbul-lib-coverage": "^3.2.0",
+ "semver": "^7.5.4"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/istanbul-lib-instrument/node_modules/semver": {
+ "version": "7.7.2",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
+ "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
+ "dev": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/istanbul-lib-report": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz",
+ "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "istanbul-lib-coverage": "^3.0.0",
+ "make-dir": "^4.0.0",
+ "supports-color": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/istanbul-lib-source-maps": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz",
+ "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "debug": "^4.1.1",
+ "istanbul-lib-coverage": "^3.0.0",
+ "source-map": "^0.6.1"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/istanbul-reports": {
+ "version": "3.1.7",
+ "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz",
+ "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "html-escaper": "^2.0.0",
+ "istanbul-lib-report": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/jake": {
+ "version": "10.9.2",
+ "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz",
+ "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "async": "^3.2.3",
+ "chalk": "^4.0.2",
+ "filelist": "^1.0.4",
+ "minimatch": "^3.1.2"
+ },
+ "bin": {
+ "jake": "bin/cli.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/jest": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz",
+ "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/core": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "import-local": "^3.0.2",
+ "jest-cli": "^29.7.0"
+ },
+ "bin": {
+ "jest": "bin/jest.js"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0"
+ },
+ "peerDependenciesMeta": {
+ "node-notifier": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/jest-changed-files": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz",
+ "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "execa": "^5.0.0",
+ "jest-util": "^29.7.0",
+ "p-limit": "^3.1.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-circus": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz",
+ "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/environment": "^29.7.0",
+ "@jest/expect": "^29.7.0",
+ "@jest/test-result": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "chalk": "^4.0.0",
+ "co": "^4.6.0",
+ "dedent": "^1.0.0",
+ "is-generator-fn": "^2.0.0",
+ "jest-each": "^29.7.0",
+ "jest-matcher-utils": "^29.7.0",
+ "jest-message-util": "^29.7.0",
+ "jest-runtime": "^29.7.0",
+ "jest-snapshot": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "p-limit": "^3.1.0",
+ "pretty-format": "^29.7.0",
+ "pure-rand": "^6.0.0",
+ "slash": "^3.0.0",
+ "stack-utils": "^2.0.3"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-cli": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz",
+ "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/core": "^29.7.0",
+ "@jest/test-result": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "chalk": "^4.0.0",
+ "create-jest": "^29.7.0",
+ "exit": "^0.1.2",
+ "import-local": "^3.0.2",
+ "jest-config": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "jest-validate": "^29.7.0",
+ "yargs": "^17.3.1"
+ },
+ "bin": {
+ "jest": "bin/jest.js"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0"
+ },
+ "peerDependenciesMeta": {
+ "node-notifier": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/jest-config": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz",
+ "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/core": "^7.11.6",
+ "@jest/test-sequencer": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "babel-jest": "^29.7.0",
+ "chalk": "^4.0.0",
+ "ci-info": "^3.2.0",
+ "deepmerge": "^4.2.2",
+ "glob": "^7.1.3",
+ "graceful-fs": "^4.2.9",
+ "jest-circus": "^29.7.0",
+ "jest-environment-node": "^29.7.0",
+ "jest-get-type": "^29.6.3",
+ "jest-regex-util": "^29.6.3",
+ "jest-resolve": "^29.7.0",
+ "jest-runner": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "jest-validate": "^29.7.0",
+ "micromatch": "^4.0.4",
+ "parse-json": "^5.2.0",
+ "pretty-format": "^29.7.0",
+ "slash": "^3.0.0",
+ "strip-json-comments": "^3.1.1"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "peerDependencies": {
+ "@types/node": "*",
+ "ts-node": ">=9.0.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/node": {
+ "optional": true
+ },
+ "ts-node": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/jest-diff": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz",
+ "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "chalk": "^4.0.0",
+ "diff-sequences": "^29.6.3",
+ "jest-get-type": "^29.6.3",
+ "pretty-format": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-docblock": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz",
+ "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "detect-newline": "^3.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-each": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz",
+ "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "chalk": "^4.0.0",
+ "jest-get-type": "^29.6.3",
+ "jest-util": "^29.7.0",
+ "pretty-format": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-environment-node": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz",
+ "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/environment": "^29.7.0",
+ "@jest/fake-timers": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "jest-mock": "^29.7.0",
+ "jest-util": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-get-type": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz",
+ "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-haste-map": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz",
+ "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "@types/graceful-fs": "^4.1.3",
+ "@types/node": "*",
+ "anymatch": "^3.0.3",
+ "fb-watchman": "^2.0.0",
+ "graceful-fs": "^4.2.9",
+ "jest-regex-util": "^29.6.3",
+ "jest-util": "^29.7.0",
+ "jest-worker": "^29.7.0",
+ "micromatch": "^4.0.4",
+ "walker": "^1.0.8"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ },
+ "optionalDependencies": {
+ "fsevents": "^2.3.2"
+ }
+ },
+ "node_modules/jest-leak-detector": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz",
+ "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "jest-get-type": "^29.6.3",
+ "pretty-format": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-matcher-utils": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz",
+ "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "chalk": "^4.0.0",
+ "jest-diff": "^29.7.0",
+ "jest-get-type": "^29.6.3",
+ "pretty-format": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-message-util": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz",
+ "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.12.13",
+ "@jest/types": "^29.6.3",
+ "@types/stack-utils": "^2.0.0",
+ "chalk": "^4.0.0",
+ "graceful-fs": "^4.2.9",
+ "micromatch": "^4.0.4",
+ "pretty-format": "^29.7.0",
+ "slash": "^3.0.0",
+ "stack-utils": "^2.0.3"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-mock": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz",
+ "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "jest-util": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-pnp-resolver": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz",
+ "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ },
+ "peerDependencies": {
+ "jest-resolve": "*"
+ },
+ "peerDependenciesMeta": {
+ "jest-resolve": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/jest-regex-util": {
+ "version": "29.6.3",
+ "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz",
+ "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-resolve": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz",
+ "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "chalk": "^4.0.0",
+ "graceful-fs": "^4.2.9",
+ "jest-haste-map": "^29.7.0",
+ "jest-pnp-resolver": "^1.2.2",
+ "jest-util": "^29.7.0",
+ "jest-validate": "^29.7.0",
+ "resolve": "^1.20.0",
+ "resolve.exports": "^2.0.0",
+ "slash": "^3.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-resolve-dependencies": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz",
+ "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "jest-regex-util": "^29.6.3",
+ "jest-snapshot": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-runner": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz",
+ "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/console": "^29.7.0",
+ "@jest/environment": "^29.7.0",
+ "@jest/test-result": "^29.7.0",
+ "@jest/transform": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "chalk": "^4.0.0",
+ "emittery": "^0.13.1",
+ "graceful-fs": "^4.2.9",
+ "jest-docblock": "^29.7.0",
+ "jest-environment-node": "^29.7.0",
+ "jest-haste-map": "^29.7.0",
+ "jest-leak-detector": "^29.7.0",
+ "jest-message-util": "^29.7.0",
+ "jest-resolve": "^29.7.0",
+ "jest-runtime": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "jest-watcher": "^29.7.0",
+ "jest-worker": "^29.7.0",
+ "p-limit": "^3.1.0",
+ "source-map-support": "0.5.13"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-runtime": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz",
+ "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/environment": "^29.7.0",
+ "@jest/fake-timers": "^29.7.0",
+ "@jest/globals": "^29.7.0",
+ "@jest/source-map": "^29.6.3",
+ "@jest/test-result": "^29.7.0",
+ "@jest/transform": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "chalk": "^4.0.0",
+ "cjs-module-lexer": "^1.0.0",
+ "collect-v8-coverage": "^1.0.0",
+ "glob": "^7.1.3",
+ "graceful-fs": "^4.2.9",
+ "jest-haste-map": "^29.7.0",
+ "jest-message-util": "^29.7.0",
+ "jest-mock": "^29.7.0",
+ "jest-regex-util": "^29.6.3",
+ "jest-resolve": "^29.7.0",
+ "jest-snapshot": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "slash": "^3.0.0",
+ "strip-bom": "^4.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-snapshot": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz",
+ "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/core": "^7.11.6",
+ "@babel/generator": "^7.7.2",
+ "@babel/plugin-syntax-jsx": "^7.7.2",
+ "@babel/plugin-syntax-typescript": "^7.7.2",
+ "@babel/types": "^7.3.3",
+ "@jest/expect-utils": "^29.7.0",
+ "@jest/transform": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "babel-preset-current-node-syntax": "^1.0.0",
+ "chalk": "^4.0.0",
+ "expect": "^29.7.0",
+ "graceful-fs": "^4.2.9",
+ "jest-diff": "^29.7.0",
+ "jest-get-type": "^29.6.3",
+ "jest-matcher-utils": "^29.7.0",
+ "jest-message-util": "^29.7.0",
+ "jest-util": "^29.7.0",
+ "natural-compare": "^1.4.0",
+ "pretty-format": "^29.7.0",
+ "semver": "^7.5.3"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-snapshot/node_modules/semver": {
+ "version": "7.7.2",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
+ "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
+ "dev": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/jest-util": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz",
+ "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "chalk": "^4.0.0",
+ "ci-info": "^3.2.0",
+ "graceful-fs": "^4.2.9",
+ "picomatch": "^2.2.3"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-validate": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz",
+ "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/types": "^29.6.3",
+ "camelcase": "^6.2.0",
+ "chalk": "^4.0.0",
+ "jest-get-type": "^29.6.3",
+ "leven": "^3.1.0",
+ "pretty-format": "^29.7.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-validate/node_modules/camelcase": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
+ "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/jest-watcher": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz",
+ "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/test-result": "^29.7.0",
+ "@jest/types": "^29.6.3",
+ "@types/node": "*",
+ "ansi-escapes": "^4.2.1",
+ "chalk": "^4.0.0",
+ "emittery": "^0.13.1",
+ "jest-util": "^29.7.0",
+ "string-length": "^4.0.1"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-worker": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz",
+ "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*",
+ "jest-util": "^29.7.0",
+ "merge-stream": "^2.0.0",
+ "supports-color": "^8.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/jest-worker/node_modules/supports-color": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
+ "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "has-flag": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/supports-color?sponsor=1"
+ }
+ },
+ "node_modules/js-tokens": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/js-yaml": {
+ "version": "3.14.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
+ "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "argparse": "^1.0.7",
+ "esprima": "^4.0.0"
+ },
+ "bin": {
+ "js-yaml": "bin/js-yaml.js"
+ }
+ },
+ "node_modules/jsesc": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
+ "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "jsesc": "bin/jsesc"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/json-parse-even-better-errors": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
+ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/json5": {
+ "version": "2.2.3",
+ "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
+ "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "json5": "lib/cli.js"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/kleur": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz",
+ "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/leven": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz",
+ "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/lines-and-columns": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
+ "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/locate-path": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+ "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "p-locate": "^4.1.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/lodash.memoize": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
+ "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/lru-cache": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
+ "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "yallist": "^3.0.2"
+ }
+ },
+ "node_modules/make-dir": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
+ "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "semver": "^7.5.3"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/make-dir/node_modules/semver": {
+ "version": "7.7.2",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
+ "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
+ "dev": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/make-error": {
+ "version": "1.3.6",
+ "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
+ "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/makeerror": {
+ "version": "1.0.12",
+ "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz",
+ "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "tmpl": "1.0.5"
+ }
+ },
+ "node_modules/merge-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
+ "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/micromatch": {
+ "version": "4.0.8",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
+ "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "braces": "^3.0.3",
+ "picomatch": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=8.6"
+ }
+ },
+ "node_modules/mimic-fn": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+ "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/minimatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/natural-compare": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/node-int64": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz",
+ "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/node-releases": {
+ "version": "2.0.19",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
+ "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/normalize-path": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/npm-run-path": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
+ "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "path-key": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "wrappy": "1"
+ }
+ },
+ "node_modules/onetime": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
+ "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "mimic-fn": "^2.1.0"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "yocto-queue": "^0.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/p-locate": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+ "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "p-limit": "^2.2.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/p-locate/node_modules/p-limit": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+ "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "p-try": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/p-try": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
+ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/parse-json": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
+ "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.0.0",
+ "error-ex": "^1.3.1",
+ "json-parse-even-better-errors": "^2.3.0",
+ "lines-and-columns": "^1.1.6"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-parse": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/picocolors": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
+ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/pirates": {
+ "version": "4.0.7",
+ "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz",
+ "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/pkg-dir": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
+ "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "find-up": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/pretty-format": {
+ "version": "29.7.0",
+ "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz",
+ "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jest/schemas": "^29.6.3",
+ "ansi-styles": "^5.0.0",
+ "react-is": "^18.0.0"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
+ }
+ },
+ "node_modules/pretty-format/node_modules/ansi-styles": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
+ "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/prompts": {
+ "version": "2.4.2",
+ "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
+ "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "kleur": "^3.0.3",
+ "sisteransi": "^1.0.5"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/pure-rand": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz",
+ "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/dubzzz"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/fast-check"
+ }
+ ],
+ "license": "MIT"
+ },
+ "node_modules/react-is": {
+ "version": "18.3.1",
+ "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
+ "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/resolve": {
+ "version": "1.22.10",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
+ "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "is-core-module": "^2.16.0",
+ "path-parse": "^1.0.7",
+ "supports-preserve-symlinks-flag": "^1.0.0"
+ },
+ "bin": {
+ "resolve": "bin/resolve"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/resolve-cwd": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz",
+ "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "resolve-from": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/resolve-from": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
+ "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/resolve.exports": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz",
+ "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/semver": {
+ "version": "6.3.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+ "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ }
+ },
+ "node_modules/shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "shebang-regex": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/signal-exit": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/sisteransi": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
+ "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/source-map": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/source-map-support": {
+ "version": "0.5.13",
+ "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz",
+ "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "buffer-from": "^1.0.0",
+ "source-map": "^0.6.0"
+ }
+ },
+ "node_modules/sprintf-js": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+ "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
+ "dev": true,
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/stack-utils": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
+ "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "escape-string-regexp": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/string-length": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz",
+ "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "char-regex": "^1.0.2",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-bom": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz",
+ "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-final-newline": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
+ "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/strip-json-comments": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "has-flag": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/supports-preserve-symlinks-flag": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
+ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/test-exclude": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
+ "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "@istanbuljs/schema": "^0.1.2",
+ "glob": "^7.1.4",
+ "minimatch": "^3.0.4"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/tmpl": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz",
+ "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==",
+ "dev": true,
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "is-number": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=8.0"
+ }
+ },
+ "node_modules/ts-jest": {
+ "version": "29.3.4",
+ "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.4.tgz",
+ "integrity": "sha512-Iqbrm8IXOmV+ggWHOTEbjwyCf2xZlUMv5npExksXohL+tk8va4Fjhb+X2+Rt9NBmgO7bJ8WpnMLOwih/DnMlFA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "bs-logger": "^0.2.6",
+ "ejs": "^3.1.10",
+ "fast-json-stable-stringify": "^2.1.0",
+ "jest-util": "^29.0.0",
+ "json5": "^2.2.3",
+ "lodash.memoize": "^4.1.2",
+ "make-error": "^1.3.6",
+ "semver": "^7.7.2",
+ "type-fest": "^4.41.0",
+ "yargs-parser": "^21.1.1"
+ },
+ "bin": {
+ "ts-jest": "cli.js"
+ },
+ "engines": {
+ "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0"
+ },
+ "peerDependencies": {
+ "@babel/core": ">=7.0.0-beta.0 <8",
+ "@jest/transform": "^29.0.0",
+ "@jest/types": "^29.0.0",
+ "babel-jest": "^29.0.0",
+ "jest": "^29.0.0",
+ "typescript": ">=4.3 <6"
+ },
+ "peerDependenciesMeta": {
+ "@babel/core": {
+ "optional": true
+ },
+ "@jest/transform": {
+ "optional": true
+ },
+ "@jest/types": {
+ "optional": true
+ },
+ "babel-jest": {
+ "optional": true
+ },
+ "esbuild": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/ts-jest/node_modules/semver": {
+ "version": "7.7.2",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
+ "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
+ "dev": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/ts-jest/node_modules/type-fest": {
+ "version": "4.41.0",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz",
+ "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==",
+ "dev": true,
+ "license": "(MIT OR CC0-1.0)",
+ "engines": {
+ "node": ">=16"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/ts-node": {
+ "version": "10.9.2",
+ "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
+ "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@cspotcode/source-map-support": "^0.8.0",
+ "@tsconfig/node10": "^1.0.7",
+ "@tsconfig/node12": "^1.0.7",
+ "@tsconfig/node14": "^1.0.0",
+ "@tsconfig/node16": "^1.0.2",
+ "acorn": "^8.4.1",
+ "acorn-walk": "^8.1.1",
+ "arg": "^4.1.0",
+ "create-require": "^1.1.0",
+ "diff": "^4.0.1",
+ "make-error": "^1.1.1",
+ "v8-compile-cache-lib": "^3.0.1",
+ "yn": "3.1.1"
+ },
+ "bin": {
+ "ts-node": "dist/bin.js",
+ "ts-node-cwd": "dist/bin-cwd.js",
+ "ts-node-esm": "dist/bin-esm.js",
+ "ts-node-script": "dist/bin-script.js",
+ "ts-node-transpile-only": "dist/bin-transpile.js",
+ "ts-script": "dist/bin-script-deprecated.js"
+ },
+ "peerDependencies": {
+ "@swc/core": ">=1.2.50",
+ "@swc/wasm": ">=1.2.50",
+ "@types/node": "*",
+ "typescript": ">=2.7"
+ },
+ "peerDependenciesMeta": {
+ "@swc/core": {
+ "optional": true
+ },
+ "@swc/wasm": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/type-detect": {
+ "version": "4.0.8",
+ "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
+ "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/type-fest": {
+ "version": "0.21.3",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
+ "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
+ "dev": true,
+ "license": "(MIT OR CC0-1.0)",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/typescript": {
+ "version": "5.6.3",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz",
+ "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ },
+ "node_modules/undici-types": {
+ "version": "6.19.8",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
+ "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/update-browserslist-db": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
+ "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "escalade": "^3.2.0",
+ "picocolors": "^1.1.1"
+ },
+ "bin": {
+ "update-browserslist-db": "cli.js"
+ },
+ "peerDependencies": {
+ "browserslist": ">= 4.21.0"
+ }
+ },
+ "node_modules/v8-compile-cache-lib": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
+ "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/v8-to-istanbul": {
+ "version": "9.3.0",
+ "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz",
+ "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "@jridgewell/trace-mapping": "^0.3.12",
+ "@types/istanbul-lib-coverage": "^2.0.1",
+ "convert-source-map": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=10.12.0"
+ }
+ },
+ "node_modules/walker": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz",
+ "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "makeerror": "1.0.12"
+ }
+ },
+ "node_modules/which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "isexe": "^2.0.0"
+ },
+ "bin": {
+ "node-which": "bin/node-which"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/write-file-atomic": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz",
+ "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "imurmurhash": "^0.1.4",
+ "signal-exit": "^3.0.7"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16.0.0"
+ }
+ },
+ "node_modules/y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yallist": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
+ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/yargs": {
+ "version": "17.7.2",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
+ "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "cliui": "^8.0.1",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.3",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^21.1.1"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/yargs-parser": {
+ "version": "21.1.1",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
+ "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/yn": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
+ "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ }
+ }
+}
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/package.json b/examples/Event Handler/BedrockAgentFunction/infra/package.json
new file mode 100644
index 000000000..eb6545cac
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/package.json
@@ -0,0 +1,26 @@
+{
+ "name": "infra",
+ "version": "0.1.0",
+ "bin": {
+ "infra": "bin/infra.js"
+ },
+ "scripts": {
+ "build": "tsc",
+ "watch": "tsc -w",
+ "test": "jest",
+ "cdk": "cdk"
+ },
+ "devDependencies": {
+ "@types/jest": "^29.5.14",
+ "@types/node": "22.7.9",
+ "jest": "^29.7.0",
+ "ts-jest": "^29.2.5",
+ "aws-cdk": "2.1017.1",
+ "ts-node": "^10.9.2",
+ "typescript": "~5.6.3"
+ },
+ "dependencies": {
+ "aws-cdk-lib": "2.198.0",
+ "constructs": "^10.0.0"
+ }
+}
diff --git a/examples/Event Handler/BedrockAgentFunction/infra/tsconfig.json b/examples/Event Handler/BedrockAgentFunction/infra/tsconfig.json
new file mode 100644
index 000000000..28bb557fa
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/infra/tsconfig.json
@@ -0,0 +1,31 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "module": "NodeNext",
+ "moduleResolution": "NodeNext",
+ "lib": [
+ "es2022"
+ ],
+ "declaration": true,
+ "strict": true,
+ "noImplicitAny": true,
+ "strictNullChecks": true,
+ "noImplicitThis": true,
+ "alwaysStrict": true,
+ "noUnusedLocals": false,
+ "noUnusedParameters": false,
+ "noImplicitReturns": true,
+ "noFallthroughCasesInSwitch": false,
+ "inlineSourceMap": true,
+ "inlineSources": true,
+ "experimentalDecorators": true,
+ "strictPropertyInitialization": false,
+ "typeRoots": [
+ "./node_modules/@types"
+ ]
+ },
+ "exclude": [
+ "node_modules",
+ "cdk.out"
+ ]
+}
diff --git a/examples/Event Handler/BedrockAgentFunction/src/AirportService.cs b/examples/Event Handler/BedrockAgentFunction/src/AirportService.cs
new file mode 100644
index 000000000..aa26e7f9f
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/src/AirportService.cs
@@ -0,0 +1,222 @@
+namespace BedrockAgentFunction;
+
+public class AirportService
+{
+ private readonly Dictionary _airportsByCity = new(StringComparer.OrdinalIgnoreCase)
+ {
+ {
+ "New York",
+ new AirportInfo { City = "New York", Code = "JFK", Name = "John F. Kennedy International Airport" }
+ },
+ { "London", new AirportInfo { City = "London", Code = "LHR", Name = "London Heathrow Airport" } },
+ { "Paris", new AirportInfo { City = "Paris", Code = "CDG", Name = "Charles de Gaulle Airport" } },
+ { "Tokyo", new AirportInfo { City = "Tokyo", Code = "HND", Name = "Tokyo Haneda Airport" } },
+ { "Sydney", new AirportInfo { City = "Sydney", Code = "SYD", Name = "Sydney Airport" } },
+ {
+ "Los Angeles",
+ new AirportInfo { City = "Los Angeles", Code = "LAX", Name = "Los Angeles International Airport" }
+ },
+ { "Berlin", new AirportInfo { City = "Berlin", Code = "TXL", Name = "Berlin Tegel Airport" } },
+ { "Dubai", new AirportInfo { City = "Dubai", Code = "DXB", Name = "Dubai International Airport" } },
+ {
+ "Toronto",
+ new AirportInfo { City = "Toronto", Code = "YYZ", Name = "Toronto Pearson International Airport" }
+ },
+ { "Singapore", new AirportInfo { City = "Singapore", Code = "SIN", Name = "Singapore Changi Airport" } },
+ { "Hong Kong", new AirportInfo { City = "Hong Kong", Code = "HKG", Name = "Hong Kong International Airport" } },
+ { "Madrid", new AirportInfo { City = "Madrid", Code = "MAD", Name = "Adolfo SuÃĄrez MadridâBarajas Airport" } },
+ { "Rome", new AirportInfo { City = "Rome", Code = "FCO", Name = "Leonardo da Vinci International Airport" } },
+ { "Moscow", new AirportInfo { City = "Moscow", Code = "SVO", Name = "Sheremetyevo International Airport" } },
+ {
+ "SÃŖo Paulo",
+ new AirportInfo
+ {
+ City = "SÃŖo Paulo", Code = "GRU",
+ Name = "SÃŖo Paulo/GuarulhosâGovernador AndrÊ Franco Montoro International Airport"
+ }
+ },
+ { "Istanbul", new AirportInfo { City = "Istanbul", Code = "IST", Name = "Istanbul Airport" } },
+ { "Bangkok", new AirportInfo { City = "Bangkok", Code = "BKK", Name = "Suvarnabhumi Airport" } },
+ {
+ "Mexico City",
+ new AirportInfo { City = "Mexico City", Code = "MEX", Name = "Mexico City International Airport" }
+ },
+ { "Cairo", new AirportInfo { City = "Cairo", Code = "CAI", Name = "Cairo International Airport" } },
+ {
+ "Buenos Aires",
+ new AirportInfo { City = "Buenos Aires", Code = "EZE", Name = "Ministro Pistarini International Airport" }
+ },
+ {
+ "Kuala Lumpur",
+ new AirportInfo { City = "Kuala Lumpur", Code = "KUL", Name = "Kuala Lumpur International Airport" }
+ },
+ { "Amsterdam", new AirportInfo { City = "Amsterdam", Code = "AMS", Name = "Amsterdam Airport Schiphol" } },
+ { "Barcelona", new AirportInfo { City = "Barcelona", Code = "BCN", Name = "BarcelonaâEl Prat Airport" } },
+ { "Lima", new AirportInfo { City = "Lima", Code = "LIM", Name = "Jorge ChÃĄvez International Airport" } },
+ { "Seoul", new AirportInfo { City = "Seoul", Code = "ICN", Name = "Incheon International Airport" } },
+ {
+ "Rio de Janeiro",
+ new AirportInfo
+ {
+ City = "Rio de Janeiro", Code = "GIG",
+ Name = "Rio de Janeiro/GaleÃŖoâAntonio Carlos Jobim International Airport"
+ }
+ },
+ { "Dublin", new AirportInfo { City = "Dublin", Code = "DUB", Name = "Dublin Airport" } },
+ { "Brussels", new AirportInfo { City = "Brussels", Code = "BRU", Name = "Brussels Airport" } },
+ { "Lisbon", new AirportInfo { City = "Lisbon", Code = "LIS", Name = "Lisbon Portela Airport" } },
+ { "Athens", new AirportInfo { City = "Athens", Code = "ATH", Name = "Athens International Airport" } },
+ { "Oslo", new AirportInfo { City = "Oslo", Code = "OSL", Name = "Oslo Airport, Gardermoen" } },
+ { "Stockholm", new AirportInfo { City = "Stockholm", Code = "ARN", Name = "Stockholm Arlanda Airport" } },
+ { "Helsinki", new AirportInfo { City = "Helsinki", Code = "HEL", Name = "Helsinki-Vantaa Airport" } },
+ { "Prague", new AirportInfo { City = "Prague", Code = "PRG", Name = "VÃĄclav Havel Airport Prague" } },
+ { "Warsaw", new AirportInfo { City = "Warsaw", Code = "WAW", Name = "Warsaw Chopin Airport" } },
+ { "Copenhagen", new AirportInfo { City = "Copenhagen", Code = "CPH", Name = "Copenhagen Airport" } },
+ {
+ "Budapest",
+ new AirportInfo { City = "Budapest", Code = "BUD", Name = "Budapest Ferenc Liszt International Airport" }
+ },
+ { "Osaka", new AirportInfo { City = "Osaka", Code = "KIX", Name = "Kansai International Airport" } },
+ {
+ "San Francisco",
+ new AirportInfo { City = "San Francisco", Code = "SFO", Name = "San Francisco International Airport" }
+ },
+ { "Miami", new AirportInfo { City = "Miami", Code = "MIA", Name = "Miami International Airport" } },
+ {
+ "Seattle", new AirportInfo { City = "Seattle", Code = "SEA", Name = "SeattleâTacoma International Airport" }
+ },
+ { "Vancouver", new AirportInfo { City = "Vancouver", Code = "YVR", Name = "Vancouver International Airport" } },
+ { "Melbourne", new AirportInfo { City = "Melbourne", Code = "MEL", Name = "Melbourne Airport" } },
+ { "Auckland", new AirportInfo { City = "Auckland", Code = "AKL", Name = "Auckland Airport" } },
+ { "Doha", new AirportInfo { City = "Doha", Code = "DOH", Name = "Hamad International Airport" } },
+ {
+ "Kuwait City", new AirportInfo { City = "Kuwait City", Code = "KWI", Name = "Kuwait International Airport" }
+ },
+ {
+ "Bangalore", new AirportInfo { City = "Bangalore", Code = "BLR", Name = "Kempegowda International Airport" }
+ },
+ {
+ "Beijing",
+ new AirportInfo { City = "Beijing", Code = "PEK", Name = "Beijing Capital International Airport" }
+ },
+ {
+ "Shanghai",
+ new AirportInfo { City = "Shanghai", Code = "PVG", Name = "Shanghai Pudong International Airport" }
+ },
+ { "Manila", new AirportInfo { City = "Manila", Code = "MNL", Name = "Ninoy Aquino International Airport" } },
+ {
+ "Jakarta", new AirportInfo { City = "Jakarta", Code = "CGK", Name = "SoekarnoâHatta International Airport" }
+ },
+ {
+ "Santiago",
+ new AirportInfo
+ { City = "Santiago", Code = "SCL", Name = "Comodoro Arturo Merino BenÃtez International Airport" }
+ },
+ { "Lagos", new AirportInfo { City = "Lagos", Code = "LOS", Name = "Murtala Muhammed International Airport" } },
+ { "Nairobi", new AirportInfo { City = "Nairobi", Code = "NBO", Name = "Jomo Kenyatta International Airport" } },
+ { "Chicago", new AirportInfo { City = "Chicago", Code = "ORD", Name = "O'Hare International Airport" } },
+ {
+ "Atlanta",
+ new AirportInfo
+ { City = "Atlanta", Code = "ATL", Name = "HartsfieldâJackson Atlanta International Airport" }
+ },
+ {
+ "Dallas",
+ new AirportInfo { City = "Dallas", Code = "DFW", Name = "Dallas/Fort Worth International Airport" }
+ },
+ {
+ "Washington, D.C.",
+ new AirportInfo
+ { City = "Washington, D.C.", Code = "IAD", Name = "Washington Dulles International Airport" }
+ },
+ { "Boston", new AirportInfo { City = "Boston", Code = "BOS", Name = "Logan International Airport" } },
+ {
+ "Philadelphia",
+ new AirportInfo { City = "Philadelphia", Code = "PHL", Name = "Philadelphia International Airport" }
+ },
+ { "Orlando", new AirportInfo { City = "Orlando", Code = "MCO", Name = "Orlando International Airport" } },
+ { "Denver", new AirportInfo { City = "Denver", Code = "DEN", Name = "Denver International Airport" } },
+ {
+ "Phoenix",
+ new AirportInfo { City = "Phoenix", Code = "PHX", Name = "Phoenix Sky Harbor International Airport" }
+ },
+ { "Las Vegas", new AirportInfo { City = "Las Vegas", Code = "LAS", Name = "McCarran International Airport" } },
+ {
+ "Houston", new AirportInfo { City = "Houston", Code = "IAH", Name = "George Bush Intercontinental Airport" }
+ },
+ {
+ "Detroit",
+ new AirportInfo { City = "Detroit", Code = "DTW", Name = "Detroit Metropolitan Wayne County Airport" }
+ },
+ {
+ "Charlotte",
+ new AirportInfo { City = "Charlotte", Code = "CLT", Name = "Charlotte Douglas International Airport" }
+ },
+ {
+ "Baltimore",
+ new AirportInfo
+ {
+ City = "Baltimore", Code = "BWI", Name = "Baltimore/Washington International Thurgood Marshall Airport"
+ }
+ },
+ {
+ "Minneapolis",
+ new AirportInfo
+ { City = "Minneapolis", Code = "MSP", Name = "MinneapolisâSaint Paul International Airport" }
+ },
+ { "San Diego", new AirportInfo { City = "San Diego", Code = "SAN", Name = "San Diego International Airport" } },
+ { "Portland", new AirportInfo { City = "Portland", Code = "PDX", Name = "Portland International Airport" } },
+ {
+ "Salt Lake City",
+ new AirportInfo { City = "Salt Lake City", Code = "SLC", Name = "Salt Lake City International Airport" }
+ },
+ {
+ "Cincinnati",
+ new AirportInfo
+ { City = "Cincinnati", Code = "CVG", Name = "Cincinnati/Northern Kentucky International Airport" }
+ },
+ {
+ "St. Louis",
+ new AirportInfo { City = "St. Louis", Code = "STL", Name = "St. Louis Lambert International Airport" }
+ },
+ {
+ "Indianapolis",
+ new AirportInfo { City = "Indianapolis", Code = "IND", Name = "Indianapolis International Airport" }
+ },
+ { "Tampa", new AirportInfo { City = "Tampa", Code = "TPA", Name = "Tampa International Airport" } },
+ { "Milan", new AirportInfo { City = "Milan", Code = "MXP", Name = "Milan Malpensa Airport" } },
+ { "Frankfurt", new AirportInfo { City = "Frankfurt", Code = "FRA", Name = "Frankfurt am Main Airport" } },
+ { "Munich", new AirportInfo { City = "Munich", Code = "MUC", Name = "Munich Airport" } },
+ {
+ "Mumbai",
+ new AirportInfo
+ { City = "Mumbai", Code = "BOM", Name = "Chhatrapati Shivaji Maharaj International Airport" }
+ },
+ { "Cape Town", new AirportInfo { City = "Cape Town", Code = "CPT", Name = "Cape Town International Airport" } },
+ { "Zurich", new AirportInfo { City = "Zurich", Code = "ZRH", Name = "Zurich Airport" } },
+ { "Vienna", new AirportInfo { City = "Vienna", Code = "VIE", Name = "Vienna International Airport" } }
+ // Add more airports as needed
+ };
+
+ public AirportInfo GetAirportInfoForCity(string city)
+ {
+ if (_airportsByCity.TryGetValue(city, out var airportInfo))
+ {
+ return airportInfo;
+ }
+
+ throw new KeyNotFoundException($"No airport information found for city: {city}");
+ }
+}
+
+public class AirportInfo
+{
+ public string City { get; set; } = string.Empty;
+ public string Code { get; set; } = string.Empty;
+ public string Name { get; set; } = string.Empty;
+
+ public override string ToString()
+ {
+ return $"{Name} ({Code}) in {City}";
+ }
+}
\ No newline at end of file
diff --git a/examples/Event Handler/BedrockAgentFunction/src/BedrockAgentFunction.csproj b/examples/Event Handler/BedrockAgentFunction/src/BedrockAgentFunction.csproj
new file mode 100644
index 000000000..bcd2c51cd
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/src/BedrockAgentFunction.csproj
@@ -0,0 +1,22 @@
+
+
+ Exe
+ net8.0
+ enable
+ enable
+ true
+ Lambda
+
+ true
+
+ true
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/Event Handler/BedrockAgentFunction/src/Function.cs b/examples/Event Handler/BedrockAgentFunction/src/Function.cs
new file mode 100644
index 000000000..c4e847ef0
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/src/Function.cs
@@ -0,0 +1,45 @@
+using Amazon.Lambda.Core;
+using Amazon.Lambda.RuntimeSupport;
+using Amazon.Lambda.Serialization.SystemTextJson;
+using AWS.Lambda.Powertools.EventHandler.Resolvers;
+using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.Models;
+using AWS.Lambda.Powertools.Logging;
+using BedrockAgentFunction;
+using Microsoft.Extensions.Logging;
+
+
+var logger = LoggerFactory.Create(builder =>
+{
+ builder.AddPowertoolsLogger(config => { config.Service = "AirportService"; });
+}).CreatePowertoolsLogger();
+
+var resolver = new BedrockAgentFunctionResolver();
+
+
+resolver.Tool("getAirportCodeForCity", "Get airport code and full name for a specific city", (string city, ILambdaContext context) =>
+{
+ logger.LogInformation("Getting airport code for city: {City}", city);
+ var airportService = new AirportService();
+ var airportInfo = airportService.GetAirportInfoForCity(city);
+
+ logger.LogInformation("Airport for {City}: {AirportInfoCode} - {AirportInfoName}", city, airportInfo.Code, airportInfo.Name);
+
+ // Note: Best approach is to override the ToString method in the AirportInfo class
+ return airportInfo;
+});
+
+
+// The function handler that will be called for each Lambda event
+var handler = async (BedrockFunctionRequest input, ILambdaContext context) =>
+{
+ return await resolver.ResolveAsync(input, context);
+};
+
+// Build the Lambda runtime client passing in the handler to call for each
+// event and the JSON serializer to use for translating Lambda JSON documents
+// to .NET types.
+await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer())
+ .Build()
+ .RunAsync();
+
+
diff --git a/examples/Event Handler/BedrockAgentFunction/src/Readme.md b/examples/Event Handler/BedrockAgentFunction/src/Readme.md
new file mode 100644
index 000000000..d0cfb6684
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/src/Readme.md
@@ -0,0 +1,47 @@
+# Powertools for AWS Lambda .NET - Bedrock Agent Function example
+
+This starter project consists of:
+* Function.cs - file containing C# top-level statements that define the function to be called for each event and start the Lambda runtime client.
+* AirportService.cs - Static list of airport codes and names used by the function.
+* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS
+
+## Executable Assembly
+
+.NET Lambda projects that use C# top level statements like this project must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly, the
+Lambda function handler value is set to the .NET Assembly name. This is different from deploying as a class library, where the function handler string includes the assembly, type and method name.
+
+To deploy as an executable assembly the Lambda runtime client must be started to listen for incoming events to process. To start
+the Lambda runtime client, add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end
+of the file containing top-level statements to start the runtime.
+
+```csharp
+await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer())
+ .Build()
+ .RunAsync();
+```
+
+Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that
+should be called for each event. If the handler takes in an input event besides `System.IO.Stream` then
+the JSON serializer must also be passed into the `Create` method.
+
+## Here are some steps to follow to get started from the command line:
+
+Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line.
+
+Install Amazon.Lambda.Tools Global Tools if not already installed.
+```
+ dotnet tool install -g Amazon.Lambda.Tools
+```
+
+If already installed check if new version is available.
+```
+ dotnet tool update -g Amazon.Lambda.Tools
+```
+
+Deploy function to AWS Lambda
+```
+ cd "BedrockAgentFunction/src"
+ dotnet lambda package --output-package ../release/BedrockAgentFunction.zip
+ cd ../infra
+ npm run cdk deploy -- --require-approval never
+```
\ No newline at end of file
diff --git a/examples/Event Handler/BedrockAgentFunction/src/aws-lambda-tools-defaults.json b/examples/Event Handler/BedrockAgentFunction/src/aws-lambda-tools-defaults.json
new file mode 100644
index 000000000..1dc447ae8
--- /dev/null
+++ b/examples/Event Handler/BedrockAgentFunction/src/aws-lambda-tools-defaults.json
@@ -0,0 +1,15 @@
+{
+ "Information": [
+ "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.",
+ "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.",
+ "dotnet lambda help",
+ "All the command line options for the Lambda command can be specified in this file."
+ ],
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "BedrockAgentFunction"
+}
\ No newline at end of file
diff --git a/examples/Idempotency/src/HelloWorld/HelloWorld.csproj b/examples/Idempotency/src/HelloWorld/HelloWorld.csproj
index edf4ee5f0..39615764a 100644
--- a/examples/Idempotency/src/HelloWorld/HelloWorld.csproj
+++ b/examples/Idempotency/src/HelloWorld/HelloWorld.csproj
@@ -5,9 +5,9 @@
enable
-
+
-
+
diff --git a/examples/Idempotency/test/HelloWorld.Test/HelloWorld.Tests.csproj b/examples/Idempotency/test/HelloWorld.Test/HelloWorld.Tests.csproj
index b00a6873d..e143aa862 100644
--- a/examples/Idempotency/test/HelloWorld.Test/HelloWorld.Tests.csproj
+++ b/examples/Idempotency/test/HelloWorld.Test/HelloWorld.Tests.csproj
@@ -3,9 +3,9 @@
net6.0;net8.0
-
+
-
+
diff --git a/examples/Kafka/Avro/src/Avro.csproj b/examples/Kafka/Avro/src/Avro.csproj
new file mode 100644
index 000000000..05314f2fb
--- /dev/null
+++ b/examples/Kafka/Avro/src/Avro.csproj
@@ -0,0 +1,35 @@
+
+
+ Exe
+ net8.0
+ enable
+ enable
+ true
+ Lambda
+
+ true
+
+ true
+
+ Avro.Example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
\ No newline at end of file
diff --git a/examples/Kafka/Avro/src/CustomerProfile.avsc b/examples/Kafka/Avro/src/CustomerProfile.avsc
new file mode 100644
index 000000000..bf8cc090c
--- /dev/null
+++ b/examples/Kafka/Avro/src/CustomerProfile.avsc
@@ -0,0 +1,46 @@
+{
+ "type": "record",
+ "name": "CustomerProfile",
+ "namespace": "com.example",
+ "fields": [
+ {"name": "user_id", "type": "string"},
+ {"name": "full_name", "type": "string"},
+ {"name": "email", "type": {
+ "type": "record",
+ "name": "EmailAddress",
+ "fields": [
+ {"name": "address", "type": "string"},
+ {"name": "verified", "type": "boolean"},
+ {"name": "primary", "type": "boolean"}
+ ]
+ }},
+ {"name": "age", "type": "int"},
+ {"name": "address", "type": {
+ "type": "record",
+ "name": "Address",
+ "fields": [
+ {"name": "street", "type": "string"},
+ {"name": "city", "type": "string"},
+ {"name": "state", "type": "string"},
+ {"name": "country", "type": "string"},
+ {"name": "zip_code", "type": "string"}
+ ]
+ }},
+ {"name": "phone_numbers", "type": {
+ "type": "array",
+ "items": {
+ "type": "record",
+ "name": "PhoneNumber",
+ "fields": [
+ {"name": "number", "type": "string"},
+ {"name": "type", "type": {"type": "enum", "name": "PhoneType", "symbols": ["HOME", "WORK", "MOBILE"]}}
+ ]
+ }
+ }},
+ {"name": "preferences", "type": {
+ "type": "map",
+ "values": "string"
+ }},
+ {"name": "account_status", "type": {"type": "enum", "name": "AccountStatus", "symbols": ["ACTIVE", "INACTIVE", "SUSPENDED"]}}
+ ]
+}
\ No newline at end of file
diff --git a/examples/Kafka/Avro/src/Function.cs b/examples/Kafka/Avro/src/Function.cs
new file mode 100644
index 000000000..6ca9ebdb5
--- /dev/null
+++ b/examples/Kafka/Avro/src/Function.cs
@@ -0,0 +1,21 @@
+using Amazon.Lambda.Core;
+using Amazon.Lambda.RuntimeSupport;
+using AWS.Lambda.Powertools.Kafka;
+using AWS.Lambda.Powertools.Kafka.Avro;
+using AWS.Lambda.Powertools.Logging;
+using com.example;
+
+string Handler(ConsumerRecords records, ILambdaContext context)
+{
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+}
+
+await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization
+ .Build()
+ .RunAsync();
\ No newline at end of file
diff --git a/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs b/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs
new file mode 100644
index 000000000..c7809f518
--- /dev/null
+++ b/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs
@@ -0,0 +1,23 @@
+// ------------------------------------------------------------------------------
+//
+// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e
+// Changes to this file may cause incorrect behavior and will be lost if code
+// is regenerated
+//
+// ------------------------------------------------------------------------------
+namespace com.example
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Text;
+ using global::Avro;
+ using global::Avro.Specific;
+
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")]
+ public enum AccountStatus
+ {
+ ACTIVE,
+ INACTIVE,
+ SUSPENDED,
+ }
+}
diff --git a/examples/Kafka/Avro/src/Generated/com/example/Address.cs b/examples/Kafka/Avro/src/Generated/com/example/Address.cs
new file mode 100644
index 000000000..e2053e0f2
--- /dev/null
+++ b/examples/Kafka/Avro/src/Generated/com/example/Address.cs
@@ -0,0 +1,115 @@
+// ------------------------------------------------------------------------------
+//
+// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e
+// Changes to this file may cause incorrect behavior and will be lost if code
+// is regenerated
+//
+// ------------------------------------------------------------------------------
+namespace com.example
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Text;
+ using global::Avro;
+ using global::Avro.Specific;
+
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")]
+ public partial class Address : global::Avro.Specific.ISpecificRecord
+ {
+ public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"Address\",\"namespace\":\"com.example\",\"fields\":[{\"name\":\"st" +
+ "reet\",\"type\":\"string\"},{\"name\":\"city\",\"type\":\"string\"},{\"name\":\"state\",\"type\":\"s" +
+ "tring\"},{\"name\":\"country\",\"type\":\"string\"},{\"name\":\"zip_code\",\"type\":\"string\"}]}" +
+ "");
+ private string _street;
+ private string _city;
+ private string _state;
+ private string _country;
+ private string _zip_code;
+ public virtual global::Avro.Schema Schema
+ {
+ get
+ {
+ return Address._SCHEMA;
+ }
+ }
+ public string street
+ {
+ get
+ {
+ return this._street;
+ }
+ set
+ {
+ this._street = value;
+ }
+ }
+ public string city
+ {
+ get
+ {
+ return this._city;
+ }
+ set
+ {
+ this._city = value;
+ }
+ }
+ public string state
+ {
+ get
+ {
+ return this._state;
+ }
+ set
+ {
+ this._state = value;
+ }
+ }
+ public string country
+ {
+ get
+ {
+ return this._country;
+ }
+ set
+ {
+ this._country = value;
+ }
+ }
+ public string zip_code
+ {
+ get
+ {
+ return this._zip_code;
+ }
+ set
+ {
+ this._zip_code = value;
+ }
+ }
+ public virtual object Get(int fieldPos)
+ {
+ switch (fieldPos)
+ {
+ case 0: return this.street;
+ case 1: return this.city;
+ case 2: return this.state;
+ case 3: return this.country;
+ case 4: return this.zip_code;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+ };
+ }
+ public virtual void Put(int fieldPos, object fieldValue)
+ {
+ switch (fieldPos)
+ {
+ case 0: this.street = (System.String)fieldValue; break;
+ case 1: this.city = (System.String)fieldValue; break;
+ case 2: this.state = (System.String)fieldValue; break;
+ case 3: this.country = (System.String)fieldValue; break;
+ case 4: this.zip_code = (System.String)fieldValue; break;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+ };
+ }
+ }
+}
diff --git a/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs b/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs
new file mode 100644
index 000000000..15d62095d
--- /dev/null
+++ b/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs
@@ -0,0 +1,154 @@
+// ------------------------------------------------------------------------------
+//
+// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e
+// Changes to this file may cause incorrect behavior and will be lost if code
+// is regenerated
+//
+// ------------------------------------------------------------------------------
+namespace com.example
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Text;
+ using global::Avro;
+ using global::Avro.Specific;
+
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")]
+ public partial class CustomerProfile : global::Avro.Specific.ISpecificRecord
+ {
+ public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""CustomerProfile"",""namespace"":""com.example"",""fields"":[{""name"":""user_id"",""type"":""string""},{""name"":""full_name"",""type"":""string""},{""name"":""email"",""type"":{""type"":""record"",""name"":""EmailAddress"",""namespace"":""com.example"",""fields"":[{""name"":""address"",""type"":""string""},{""name"":""verified"",""type"":""boolean""},{""name"":""primary"",""type"":""boolean""}]}},{""name"":""age"",""type"":""int""},{""name"":""address"",""type"":{""type"":""record"",""name"":""Address"",""namespace"":""com.example"",""fields"":[{""name"":""street"",""type"":""string""},{""name"":""city"",""type"":""string""},{""name"":""state"",""type"":""string""},{""name"":""country"",""type"":""string""},{""name"":""zip_code"",""type"":""string""}]}},{""name"":""phone_numbers"",""type"":{""type"":""array"",""items"":{""type"":""record"",""name"":""PhoneNumber"",""namespace"":""com.example"",""fields"":[{""name"":""number"",""type"":""string""},{""name"":""type"",""type"":{""type"":""enum"",""name"":""PhoneType"",""namespace"":""com.example"",""symbols"":[""HOME"",""WORK"",""MOBILE""]}}]}}},{""name"":""preferences"",""type"":{""type"":""map"",""values"":""string""}},{""name"":""account_status"",""type"":{""type"":""enum"",""name"":""AccountStatus"",""namespace"":""com.example"",""symbols"":[""ACTIVE"",""INACTIVE"",""SUSPENDED""]}}]}");
+ private string _user_id;
+ private string _full_name;
+ private com.example.EmailAddress _email;
+ private int _age;
+ private com.example.Address _address;
+ private IList<com.example.PhoneNumber> _phone_numbers;
+ private IDictionary<string,System.String> _preferences;
+ private com.example.AccountStatus _account_status;
+ public virtual global::Avro.Schema Schema
+ {
+ get
+ {
+ return CustomerProfile._SCHEMA;
+ }
+ }
+ public string user_id
+ {
+ get
+ {
+ return this._user_id;
+ }
+ set
+ {
+ this._user_id = value;
+ }
+ }
+ public string full_name
+ {
+ get
+ {
+ return this._full_name;
+ }
+ set
+ {
+ this._full_name = value;
+ }
+ }
+ public com.example.EmailAddress email
+ {
+ get
+ {
+ return this._email;
+ }
+ set
+ {
+ this._email = value;
+ }
+ }
+ public int age
+ {
+ get
+ {
+ return this._age;
+ }
+ set
+ {
+ this._age = value;
+ }
+ }
+ public com.example.Address address
+ {
+ get
+ {
+ return this._address;
+ }
+ set
+ {
+ this._address = value;
+ }
+ }
+ public IList<com.example.PhoneNumber> phone_numbers
+ {
+ get
+ {
+ return this._phone_numbers;
+ }
+ set
+ {
+ this._phone_numbers = value;
+ }
+ }
+ public IDictionary<string,System.String> preferences
+ {
+ get
+ {
+ return this._preferences;
+ }
+ set
+ {
+ this._preferences = value;
+ }
+ }
+ public com.example.AccountStatus account_status
+ {
+ get
+ {
+ return this._account_status;
+ }
+ set
+ {
+ this._account_status = value;
+ }
+ }
+ public virtual object Get(int fieldPos)
+ {
+ switch (fieldPos)
+ {
+ case 0: return this.user_id;
+ case 1: return this.full_name;
+ case 2: return this.email;
+ case 3: return this.age;
+ case 4: return this.address;
+ case 5: return this.phone_numbers;
+ case 6: return this.preferences;
+ case 7: return this.account_status;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+ };
+ }
+ public virtual void Put(int fieldPos, object fieldValue)
+ {
+ switch (fieldPos)
+ {
+ case 0: this.user_id = (System.String)fieldValue; break;
+ case 1: this.full_name = (System.String)fieldValue; break;
+ case 2: this.email = (com.example.EmailAddress)fieldValue; break;
+ case 3: this.age = (System.Int32)fieldValue; break;
+ case 4: this.address = (com.example.Address)fieldValue; break;
+ case 5: this.phone_numbers = (IList<com.example.PhoneNumber>)fieldValue; break;
+ case 6: this.preferences = (IDictionary<string,System.String>)fieldValue; break;
+ case 7: this.account_status = (com.example.AccountStatus)fieldValue; break;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+ };
+ }
+ }
+}
diff --git a/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs b/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs
new file mode 100644
index 000000000..4a25a6e0b
--- /dev/null
+++ b/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs
@@ -0,0 +1,86 @@
+// ------------------------------------------------------------------------------
+//
+// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e
+// Changes to this file may cause incorrect behavior and will be lost if code
+// is regenerated
+//
+// ------------------------------------------------------------------------------
+namespace com.example
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Text;
+ using global::Avro;
+ using global::Avro.Specific;
+
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")]
+ public partial class EmailAddress : global::Avro.Specific.ISpecificRecord
+ {
+ public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"EmailAddress\",\"namespace\":\"com.example\",\"fields\":[{\"name" +
+ "\":\"address\",\"type\":\"string\"},{\"name\":\"verified\",\"type\":\"boolean\"},{\"name\":\"prima" +
+ "ry\",\"type\":\"boolean\"}]}");
+ private string _address;
+ private bool _verified;
+ private bool _primary;
+ public virtual global::Avro.Schema Schema
+ {
+ get
+ {
+ return EmailAddress._SCHEMA;
+ }
+ }
+ public string address
+ {
+ get
+ {
+ return this._address;
+ }
+ set
+ {
+ this._address = value;
+ }
+ }
+ public bool verified
+ {
+ get
+ {
+ return this._verified;
+ }
+ set
+ {
+ this._verified = value;
+ }
+ }
+ public bool primary
+ {
+ get
+ {
+ return this._primary;
+ }
+ set
+ {
+ this._primary = value;
+ }
+ }
+ public virtual object Get(int fieldPos)
+ {
+ switch (fieldPos)
+ {
+ case 0: return this.address;
+ case 1: return this.verified;
+ case 2: return this.primary;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+ };
+ }
+ public virtual void Put(int fieldPos, object fieldValue)
+ {
+ switch (fieldPos)
+ {
+ case 0: this.address = (System.String)fieldValue; break;
+ case 1: this.verified = (System.Boolean)fieldValue; break;
+ case 2: this.primary = (System.Boolean)fieldValue; break;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+ };
+ }
+ }
+}
diff --git a/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs b/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs
new file mode 100644
index 000000000..ea3d2b8ed
--- /dev/null
+++ b/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs
@@ -0,0 +1,72 @@
+// ------------------------------------------------------------------------------
+//
+// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e
+// Changes to this file may cause incorrect behavior and will be lost if code
+// is regenerated
+//
+// ------------------------------------------------------------------------------
+namespace com.example
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Text;
+ using global::Avro;
+ using global::Avro.Specific;
+
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")]
+ public partial class PhoneNumber : global::Avro.Specific.ISpecificRecord
+ {
+ public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"PhoneNumber\",\"namespace\":\"com.example\",\"fields\":[{\"name\"" +
+ ":\"number\",\"type\":\"string\"},{\"name\":\"type\",\"type\":{\"type\":\"enum\",\"name\":\"PhoneTyp" +
+ "e\",\"namespace\":\"com.example\",\"symbols\":[\"HOME\",\"WORK\",\"MOBILE\"]}}]}");
+ private string _number;
+ private com.example.PhoneType _type;
+ public virtual global::Avro.Schema Schema
+ {
+ get
+ {
+ return PhoneNumber._SCHEMA;
+ }
+ }
+ public string number
+ {
+ get
+ {
+ return this._number;
+ }
+ set
+ {
+ this._number = value;
+ }
+ }
+ public com.example.PhoneType type
+ {
+ get
+ {
+ return this._type;
+ }
+ set
+ {
+ this._type = value;
+ }
+ }
+ public virtual object Get(int fieldPos)
+ {
+ switch (fieldPos)
+ {
+ case 0: return this.number;
+ case 1: return this.type;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+ };
+ }
+ public virtual void Put(int fieldPos, object fieldValue)
+ {
+ switch (fieldPos)
+ {
+ case 0: this.number = (System.String)fieldValue; break;
+ case 1: this.type = (com.example.PhoneType)fieldValue; break;
+ default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+ };
+ }
+ }
+}
diff --git a/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs b/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs
new file mode 100644
index 000000000..f592d8692
--- /dev/null
+++ b/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs
@@ -0,0 +1,23 @@
+// ------------------------------------------------------------------------------
+//
+// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e
+// Changes to this file may cause incorrect behavior and will be lost if code
+// is regenerated
+//
+// ------------------------------------------------------------------------------
+namespace com.example
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Text;
+ using global::Avro;
+ using global::Avro.Specific;
+
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")]
+ public enum PhoneType
+ {
+ HOME,
+ WORK,
+ MOBILE,
+ }
+}
diff --git a/examples/Kafka/Avro/src/Readme.md b/examples/Kafka/Avro/src/Readme.md
new file mode 100644
index 000000000..23e64e8e2
--- /dev/null
+++ b/examples/Kafka/Avro/src/Readme.md
@@ -0,0 +1,131 @@
+# AWS Powertools for AWS Lambda .NET - Kafka Avro Example
+
+This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics.
+
+## Overview
+
+This example showcases a Lambda function that consumes messages from Kafka topics with Avro serialization format.
+
+It uses the `AWS.Lambda.Powertools.Kafka.Avro` NuGet package to easily deserialize and process Kafka records.
+
+## Project Structure
+
+```bash
+examples/Kafka/Avro/src/
+├── Function.cs # Entry point for the Lambda function
+├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment
+├── template.yaml # AWS SAM template for deploying the function
+├── CustomerProfile.avsc # Avro schema definition file for the data structure used in the Kafka messages
+└── kafka-avro-event.json # Sample Avro event to test the function
+```
+
+## Prerequisites
+
+- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later)
+- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html)
+- [AWS CLI](https://aws.amazon.com/cli/)
+- An AWS account with appropriate permissions
+- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from
+- [AWS.Lambda.Powertools.Kafka.Avro](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Avro/) NuGet package installed in your project
+- [Avro Tools](https://www.nuget.org/packages/Apache.Avro.Tools/) codegen tool to generate C# classes from the Avro schema
+
+## Installation
+
+1. Clone the repository:
+
+ ```bash
+ git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git
+ ```
+
+2. Navigate to the project directory:
+
+ ```bash
+ cd powertools-lambda-dotnet/examples/Kafka/Avro/src
+ ```
+
+3. Build the project:
+
+ ```bash
+ dotnet build
+ ```
+4. Install the Avro Tools globally to generate C# classes from the Avro schema:
+
+ ```bash
+ dotnet tool install --global Apache.Avro.Tools
+ ```
+
+## Deployment
+
+Deploy the application using the AWS SAM CLI:
+
+```bash
+sam build
+sam deploy --guided
+```
+
+Follow the prompts to configure your deployment.
+
+## Avro Format
+Avro is a binary serialization format that provides a compact and efficient way to serialize structured data. It uses schemas to define the structure of the data, which allows for robust data evolution.
+
+In this example we provide a schema called `CustomerProfile.avsc`. The schema is used to serialize and deserialize the data in the Kafka messages.
+
+The C# classes are generated from the `CustomerProfile.avsc` schema file using the Avro Tools command:
+
+```bash
+# Requires the Avro Tools installed globally:
+#   dotnet tool install --global Apache.Avro.Tools
+avrogen -s CustomerProfile.avsc ./Generated
+```
+
+## Usage Examples
+
+Once deployed, you can test the Lambda function by sending a sample Avro event to the configured Kafka topic.
+You can use the `kafka-avro-event.json` file as a sample event to test the function.
+
+### Testing
+
+You can test the function locally using the AWS SAM CLI (Requires Docker to be installed):
+
+```bash
+sam local invoke AvroDeserializationFunction --event kafka-avro-event.json
+```
+
+This command simulates an invocation of the Lambda function with the provided event data.
+
+## How It Works
+
+1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source.
+2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format.
+3. **Processing**: Each record is processed within the handler function.
+
+## Event Deserialization
+
+Pass the `PowertoolsKafkaAvroSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable Avro deserialization of Kafka records:
+
+```csharp
+await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization
+ .Build()
+ .RunAsync();
+ ```
+
+## Configuration
+
+The SAM template (`template.yaml`) defines one Lambda function:
+
+- **AvroDeserializationFunction**: Handles Avro-formatted Kafka messages
+
+## Customization
+
+To customize the examples:
+
+1. Modify the schema definitions to match your data structures
+2. Update the handler logic to process the records according to your requirements
+
+## Resources
+
+- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/)
+- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/)
+- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/)
+- [Apache Avro Documentation](https://avro.apache.org/docs/)
\ No newline at end of file
diff --git a/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json
new file mode 100644
index 000000000..cd93437eb
--- /dev/null
+++ b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json
@@ -0,0 +1,15 @@
+{
+ "Information": [
+ "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.",
+ "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.",
+ "dotnet lambda help",
+ "All the command line options for the Lambda command can be specified in this file."
+ ],
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "Avro.Example"
+}
\ No newline at end of file
diff --git a/examples/Kafka/Avro/src/kafka-avro-event.json b/examples/Kafka/Avro/src/kafka-avro-event.json
new file mode 100644
index 000000000..6f5e045e3
--- /dev/null
+++ b/examples/Kafka/Avro/src/kafka-avro-event.json
@@ -0,0 +1,23 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "customer-topic-0": [
+ {
+ "topic": "customer-topic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "dXNlcl85NzU0",
+ "value": "EnVzZXJfOTc1NBxVc2VyIHVzZXJfOTc1NCh1c2VyXzk3NTRAaWNsb3VkLmNvbQABahg5MzQwIE1haW4gU3QQU2FuIEpvc2UEQ0EGVVNBCjM5NTk2AhgyNDQtNDA3LTg4NzECAAYQdGltZXpvbmUOZW5hYmxlZBBsYW5ndWFnZRBkaXNhYmxlZBpub3RpZmljYXRpb25zCGRhcmsABA==",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Avro/src/template.yaml b/examples/Kafka/Avro/src/template.yaml
new file mode 100644
index 000000000..a08325be2
--- /dev/null
+++ b/examples/Kafka/Avro/src/template.yaml
@@ -0,0 +1,27 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ kafka
+
+ Sample SAM Template for kafka
+
+# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
+Globals:
+ Function:
+ Timeout: 15
+ MemorySize: 512
+ Runtime: dotnet8
+
+Resources:
+ AvroDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: Avro.Example
+ Architectures:
+ - x86_64
+ Tracing: Active
+ Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_LOG_LEVEL: Info
+ POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default)
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Function.cs b/examples/Kafka/Json/src/Function.cs
new file mode 100644
index 000000000..d7d96bfca
--- /dev/null
+++ b/examples/Kafka/Json/src/Function.cs
@@ -0,0 +1,21 @@
+using Amazon.Lambda.Core;
+using Amazon.Lambda.RuntimeSupport;
+using AWS.Lambda.Powertools.Kafka;
+using AWS.Lambda.Powertools.Kafka.Json;
+using AWS.Lambda.Powertools.Logging;
+using Json.Models;
+
+string Handler(ConsumerRecords<string, CustomerProfile> records, ILambdaContext context)
+{
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+}
+
+await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaJsonSerializer for Json serialization
+ .Build()
+ .RunAsync();
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Json.csproj b/examples/Kafka/Json/src/Json.csproj
new file mode 100644
index 000000000..aba6cde89
--- /dev/null
+++ b/examples/Kafka/Json/src/Json.csproj
@@ -0,0 +1,30 @@
+
+
+ Exe
+ net8.0
+ enable
+ enable
+ true
+ Lambda
+
+ true
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
+
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Models/Address.cs b/examples/Kafka/Json/src/Models/Address.cs
new file mode 100644
index 000000000..a011b3cee
--- /dev/null
+++ b/examples/Kafka/Json/src/Models/Address.cs
@@ -0,0 +1,16 @@
+using System.Text.Json.Serialization;
+
+namespace Json.Models;
+
+public partial class Address
+{
+ [JsonPropertyName("street")] public string Street { get; set; }
+
+ [JsonPropertyName("city")] public string City { get; set; }
+
+ [JsonPropertyName("state")] public string State { get; set; }
+
+ [JsonPropertyName("country")] public string Country { get; set; }
+
+ [JsonPropertyName("zip_code")] public string ZipCode { get; set; }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Models/CustomerProfile.cs b/examples/Kafka/Json/src/Models/CustomerProfile.cs
new file mode 100644
index 000000000..1e7ab62b6
--- /dev/null
+++ b/examples/Kafka/Json/src/Models/CustomerProfile.cs
@@ -0,0 +1,22 @@
+using System.Text.Json.Serialization;
+
+namespace Json.Models;
+
+public partial class CustomerProfile
+{
+ [JsonPropertyName("user_id")] public string UserId { get; set; }
+
+ [JsonPropertyName("full_name")] public string FullName { get; set; }
+
+ [JsonPropertyName("email")] public Email Email { get; set; }
+
+ [JsonPropertyName("age")] public long Age { get; set; }
+
+ [JsonPropertyName("address")] public Address Address { get; set; }
+
+ [JsonPropertyName("phone_numbers")] public List<PhoneNumber> PhoneNumbers { get; set; }
+
+ [JsonPropertyName("preferences")] public Preferences Preferences { get; set; }
+
+ [JsonPropertyName("account_status")] public string AccountStatus { get; set; }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Models/Email.cs b/examples/Kafka/Json/src/Models/Email.cs
new file mode 100644
index 000000000..045118baf
--- /dev/null
+++ b/examples/Kafka/Json/src/Models/Email.cs
@@ -0,0 +1,12 @@
+using System.Text.Json.Serialization;
+
+namespace Json.Models;
+
+public partial class Email
+{
+ [JsonPropertyName("address")] public string Address { get; set; }
+
+ [JsonPropertyName("verified")] public bool Verified { get; set; }
+
+ [JsonPropertyName("primary")] public bool Primary { get; set; }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Models/PhoneNumber.cs b/examples/Kafka/Json/src/Models/PhoneNumber.cs
new file mode 100644
index 000000000..7681265d1
--- /dev/null
+++ b/examples/Kafka/Json/src/Models/PhoneNumber.cs
@@ -0,0 +1,10 @@
+using System.Text.Json.Serialization;
+
+namespace Json.Models;
+
+public partial class PhoneNumber
+{
+ [JsonPropertyName("number")] public string Number { get; set; }
+
+ [JsonPropertyName("type")] public string Type { get; set; }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Models/Preferences.cs b/examples/Kafka/Json/src/Models/Preferences.cs
new file mode 100644
index 000000000..5dd84aa99
--- /dev/null
+++ b/examples/Kafka/Json/src/Models/Preferences.cs
@@ -0,0 +1,12 @@
+using System.Text.Json.Serialization;
+
+namespace Json.Models;
+
+public partial class Preferences
+{
+ [JsonPropertyName("language")] public string Language { get; set; }
+
+ [JsonPropertyName("notifications")] public string Notifications { get; set; }
+
+ [JsonPropertyName("timezone")] public string Timezone { get; set; }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/Readme.md b/examples/Kafka/Json/src/Readme.md
new file mode 100644
index 000000000..4315f2da7
--- /dev/null
+++ b/examples/Kafka/Json/src/Readme.md
@@ -0,0 +1,111 @@
+# AWS Powertools for AWS Lambda .NET - Kafka Json Example
+
+This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics.
+
+## Overview
+
+This example showcases a Lambda function that consumes messages from Kafka topics with Json serialization format.
+
+It uses the `AWS.Lambda.Powertools.Kafka.Json` NuGet package to easily deserialize and process Kafka records.
+
+## Project Structure
+
+```bash
+examples/Kafka/Json/src/
+├── Function.cs # Entry point for the Lambda function
+├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment
+├── template.yaml # AWS SAM template for deploying the function
+└── kafka-json-event.json # Sample Json event to test the function
+```
+
+## Prerequisites
+
+- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later)
+- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html)
+- [AWS CLI](https://aws.amazon.com/cli/)
+- An AWS account with appropriate permissions
+- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from
+- [AWS.Lambda.Powertools.Kafka.Json](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Json/) NuGet package installed in your project
+
+## Installation
+
+1. Clone the repository:
+
+ ```bash
+ git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git
+ ```
+
+2. Navigate to the project directory:
+
+ ```bash
+ cd powertools-lambda-dotnet/examples/Kafka/Json/src
+ ```
+
+3. Build the project:
+
+ ```bash
+ dotnet build
+ ```
+
+## Deployment
+
+Deploy the application using the AWS SAM CLI:
+
+```bash
+sam build
+sam deploy --guided
+```
+
+Follow the prompts to configure your deployment.
+
+
+## Usage Examples
+
+Once deployed, you can test the Lambda function by sending a sample Json event to the configured Kafka topic.
+You can use the `kafka-json-event.json` file as a sample event to test the function.
+
+### Testing
+
+You can test the function locally using the AWS SAM CLI (Requires Docker to be installed):
+
+```bash
+sam local invoke JsonDeserializationFunction --event kafka-json-event.json
+```
+
+This command simulates an invocation of the Lambda function with the provided event data.
+
+## How It Works
+
+1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source.
+2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format.
+3. **Processing**: Each record is processed within the handler function.
+
+## Event Deserialization
+
+Pass the `PowertoolsKafkaJsonSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable JSON deserialization of Kafka records:
+
+```csharp
+await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaJsonSerializer for JSON serialization
+ .Build()
+ .RunAsync();
+ ```
+
+## Configuration
+
+The SAM template (`template.yaml`) defines one Lambda function:
+
+- **JsonDeserializationFunction**: Handles JSON-formatted Kafka messages
+
+## Customization
+
+To customize the examples:
+
+1. Modify the schema definitions to match your data structures
+2. Update the handler logic to process the records according to your requirements
+
+## Resources
+
+- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/)
+- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/)
+- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/)
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/aws-lambda-tools-defaults.json b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json
new file mode 100644
index 000000000..fb3240903
--- /dev/null
+++ b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json
@@ -0,0 +1,15 @@
+{
+ "Information": [
+ "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.",
+ "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.",
+ "dotnet lambda help",
+ "All the command line options for the Lambda command can be specified in this file."
+ ],
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "Json"
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/kafka-json-event.json b/examples/Kafka/Json/src/kafka-json-event.json
new file mode 100644
index 000000000..66dc2ab5a
--- /dev/null
+++ b/examples/Kafka/Json/src/kafka-json-event.json
@@ -0,0 +1,23 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "customer-topic-0": [
+ {
+ "topic": "customer-topic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "dXNlcl85NzU0",
+ "value": "eyJwaG9uZV9udW1iZXJzIjpbeyJudW1iZXIiOiIyNDQtNDA3LTg4NzEiLCJ0eXBlIjoiV09SSyJ9XSwicHJlZmVyZW5jZXMiOnsidGltZXpvbmUiOiJlbmFibGVkIiwibGFuZ3VhZ2UiOiJkaXNhYmxlZCIsIm5vdGlmaWNhdGlvbnMiOiJkYXJrIn0sImZ1bGxfbmFtZSI6IlVzZXIgdXNlcl85NzU0IiwiYWRkcmVzcyI6eyJjb3VudHJ5IjoiVVNBIiwiY2l0eSI6IlNhbiBKb3NlIiwic3RyZWV0IjoiOTM0MCBNYWluIFN0Iiwic3RhdGUiOiJDQSIsInppcF9jb2RlIjoiMzk1OTYifSwidXNlcl9pZCI6InVzZXJfOTc1NCIsImFjY291bnRfc3RhdHVzIjoiU1VTUEVOREVEIiwiYWdlIjo1MywiZW1haWwiOnsiYWRkcmVzcyI6InVzZXJfOTc1NEBpY2xvdWQuY29tIiwidmVyaWZpZWQiOmZhbHNlLCJwcmltYXJ5Ijp0cnVlfX0=",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Json/src/template.yaml b/examples/Kafka/Json/src/template.yaml
new file mode 100644
index 000000000..dd4bfb9ff
--- /dev/null
+++ b/examples/Kafka/Json/src/template.yaml
@@ -0,0 +1,27 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ kafka
+
+ Sample SAM Template for kafka
+
+# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
+Globals:
+ Function:
+ Timeout: 15
+ MemorySize: 512
+ Runtime: dotnet8
+
+Resources:
+ JsonDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: Json
+ Architectures:
+ - x86_64
+ Tracing: Active
+ Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_LOG_LEVEL: Info
+ POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default)
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto b/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto
new file mode 100644
index 000000000..9c69b1c41
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto
@@ -0,0 +1,49 @@
+syntax = "proto3";
+
+package com.example;
+
+enum PhoneType {
+ HOME = 0;
+ WORK = 1;
+ MOBILE = 2;
+}
+
+enum AccountStatus {
+ ACTIVE = 0;
+ INACTIVE = 1;
+ SUSPENDED = 2;
+}
+
+// EmailAddress message
+message EmailAddress {
+ string address = 1;
+ bool verified = 2;
+ bool primary = 3;
+}
+
+// Address message
+message Address {
+ string street = 1;
+ string city = 2;
+ string state = 3;
+ string country = 4;
+ string zip_code = 5;
+}
+
+// PhoneNumber message
+message PhoneNumber {
+ string number = 1;
+ PhoneType type = 2;
+}
+
+// CustomerProfile message
+message CustomerProfile {
+ string user_id = 1;
+ string full_name = 2;
+ EmailAddress email = 3;
+ int32 age = 4;
+ Address address = 5;
+ repeated PhoneNumber phone_numbers = 6;
+ map<string, string> preferences = 7;
+ AccountStatus account_status = 8;
+}
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/Function.cs b/examples/Kafka/JsonClassLibrary/src/Function.cs
new file mode 100644
index 000000000..98795029e
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/Function.cs
@@ -0,0 +1,32 @@
+using Amazon.Lambda.Core;
+using AWS.Lambda.Powertools.Kafka;
+using AWS.Lambda.Powertools.Kafka.Protobuf;
+using AWS.Lambda.Powertools.Logging;
+using Com.Example;
+
+// Assembly attribute to enable the Lambda function's JSON input to be converted into a .NET class.
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+
+namespace ProtoBufClassLibrary;
+
+public class Function
+{
+ public string FunctionHandler(ConsumerRecords<string, CustomerProfile> records, ILambdaContext context)
+ {
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Processing message from topic: {topic}", record.Topic);
+ Logger.LogInformation("Partition: {partition}, Offset: {offset}", record.Partition, record.Offset);
+ Logger.LogInformation("Produced at: {timestamp}", record.Timestamp);
+
+ foreach (var header in record.Headers.DecodedValues())
+ {
+ Logger.LogInformation($"{header.Key}: {header.Value}");
+ }
+
+ Logger.LogInformation("Processing order for: {fullName}", record.Value.FullName);
+ }
+
+ return "Processed " + records.Count() + " records";
+ }
+}
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj b/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj
new file mode 100644
index 000000000..a28e1a2f8
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj
@@ -0,0 +1,42 @@
+
+
+ net8.0
+ enable
+ enable
+ true
+ Lambda
+
+ true
+
+ true
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+ Client
+ Public
+ True
+ True
+ obj/Debug/net8.0/
+ MSBuild:Compile
+ PreserveNewest
+
+
+
+
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/Readme.md b/examples/Kafka/JsonClassLibrary/src/Readme.md
new file mode 100644
index 000000000..ae7e610f4
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/Readme.md
@@ -0,0 +1,130 @@
+# AWS Powertools for AWS Lambda .NET - Kafka Protobuf Example
+
+This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics.
+
+## Overview
+
+This example showcases a Lambda function that consumes messages from Kafka topics in the Protocol Buffers serialization format.
+
+It uses the `AWS.Lambda.Powertools.Kafka.Protobuf` NuGet package to easily deserialize and process Kafka records.
+
+## Project Structure
+
+```bash
+examples/Kafka/Protobuf/src/
+├── Function.cs # Entry point for the Lambda function
+├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment
+├── template.yaml # AWS SAM template for deploying the function
+├── CustomerProfile.proto # Protocol Buffers definition file for the data structure used in the Kafka messages
+└── kafka-protobuf-event.json # Sample Protocol Buffers event to test the function
+```
+
+## Prerequisites
+
+- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later)
+- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html)
+- [AWS CLI](https://aws.amazon.com/cli/)
+- An AWS account with appropriate permissions
+- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from
+- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) NuGet package installed in your project
+
+## Installation
+
+1. Clone the repository:
+
+ ```bash
+ git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git
+ ```
+
+2. Navigate to the project directory:
+
+ ```bash
+ cd powertools-lambda-dotnet/examples/Kafka/Protobuf/src
+ ```
+
+3. Build the project:
+
+ ```bash
+ dotnet build
+ ```
+
+## Deployment
+
+Deploy the application using the AWS SAM CLI:
+
+```bash
+sam build
+sam deploy --guided
+```
+
+Follow the prompts to configure your deployment.
+
+## Protocol Buffers Format
+
+The Protobuf example handles messages serialized with Protocol Buffers. The schema is defined in a `.proto` file (which would need to be created), and the C# code is generated from that schema.
+
+This requires the `Grpc.Tools` package to deserialize the messages correctly.
+
+And update the `.csproj` file to include the `.proto` files.
+
+```xml
+
+ Client
+ Public
+ True
+ True
+ obj\Debug/net8.0/
+ MSBuild:Compile
+ PreserveNewest
+
+```
+
+## Usage Examples
+
+Once deployed, you can test the Lambda function by sending a sample Protocol Buffers event to the configured Kafka topic.
+You can use the `kafka-protobuf-event.json` file as a sample event to test the function.
+
+### Testing
+
+You can test the function locally using the AWS SAM CLI (Requires Docker to be installed):
+
+```bash
+sam local invoke ProtobufDeserializationFunction --event kafka-protobuf-event.json
+```
+
+This command simulates an invocation of the Lambda function with the provided event data.
+
+## How It Works
+
+1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source.
+2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format.
+3. **Processing**: Each record is processed within the handler function.
+
+## Event Deserialization
+
+Pass the `PowertoolsKafkaProtobufSerializer` to the `[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]`:
+
+```csharp
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+ ```
+
+## Configuration
+
+The SAM template (`template.yaml`) defines one Lambda function:
+
+- **ProtobufDeserializationFunction**: Handles Protobuf-formatted Kafka messages
+
+## Customization
+
+To customize the examples:
+
+1. Modify the schema definitions to match your data structures
+2. Update the handler logic to process the records according to your requirements
+3. Ensure you have the proper `.proto` files and that they are included in your project for Protocol Buffers serialization/deserialization.
+
+## Resources
+
+- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/)
+- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/)
+- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/)
+- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers)
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json b/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json
new file mode 100644
index 000000000..d4ec43f14
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json
@@ -0,0 +1,16 @@
+{
+ "Information": [
+ "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.",
+ "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.",
+ "dotnet lambda help",
+ "All the command line options for the Lambda command can be specified in this file."
+ ],
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-architecture": "x86_64",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "ProtoBufClassLibrary::ProtoBufClassLibrary.Function::FunctionHandler"
+}
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json b/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json
new file mode 100644
index 000000000..6731ceb40
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json
@@ -0,0 +1,23 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "customer-topic-0": [
+ {
+ "topic": "customer-topic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "dXNlcl85NzU0",
+ "value": "Cgl1c2VyXzk3NTQSDlVzZXIgdXNlcl85NzU0GhgKFHVzZXJfOTc1NEBpY2xvdWQuY29tGAEgNSooCgw5MzQwIE1haW4gU3QSCFNhbiBKb3NlGgJDQSIDVVNBKgUzOTU5NjIQCgwyNDQtNDA3LTg4NzEQAToUCghsYW5ndWFnZRIIZGlzYWJsZWQ6FQoNbm90aWZpY2F0aW9ucxIEZGFyazoTCgh0aW1lem9uZRIHZW5hYmxlZEAC",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/template.yaml b/examples/Kafka/JsonClassLibrary/src/template.yaml
new file mode 100644
index 000000000..0df5feaa2
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/template.yaml
@@ -0,0 +1,27 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ kafka
+
+ Sample SAM Template for kafka
+
+# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
+Globals:
+ Function:
+ Timeout: 15
+ MemorySize: 512
+ Runtime: dotnet8
+
+Resources:
+ ProtobufClassLibraryDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: ProtoBufClassLibrary::ProtoBufClassLibrary.Function::FunctionHandler
+ Architectures:
+ - x86_64
+ Tracing: Active
+ Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_LOG_LEVEL: Info
+ POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default)
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/CustomerProfile.proto b/examples/Kafka/Protobuf/src/CustomerProfile.proto
new file mode 100644
index 000000000..9c69b1c41
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/CustomerProfile.proto
@@ -0,0 +1,49 @@
+syntax = "proto3";
+
+package com.example;
+
+enum PhoneType {
+ HOME = 0;
+ WORK = 1;
+ MOBILE = 2;
+}
+
+enum AccountStatus {
+ ACTIVE = 0;
+ INACTIVE = 1;
+ SUSPENDED = 2;
+}
+
+// EmailAddress message
+message EmailAddress {
+ string address = 1;
+ bool verified = 2;
+ bool primary = 3;
+}
+
+// Address message
+message Address {
+ string street = 1;
+ string city = 2;
+ string state = 3;
+ string country = 4;
+ string zip_code = 5;
+}
+
+// PhoneNumber message
+message PhoneNumber {
+ string number = 1;
+ PhoneType type = 2;
+}
+
+// CustomerProfile message
+message CustomerProfile {
+ string user_id = 1;
+ string full_name = 2;
+ EmailAddress email = 3;
+ int32 age = 4;
+ Address address = 5;
+ repeated PhoneNumber phone_numbers = 6;
+ map<string, string> preferences = 7;
+ AccountStatus account_status = 8;
+}
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs
new file mode 100644
index 000000000..446328696
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/Function.cs
@@ -0,0 +1,22 @@
+using Amazon.Lambda.Core;
+using Amazon.Lambda.RuntimeSupport;
+using AWS.Lambda.Powertools.Kafka;
+using AWS.Lambda.Powertools.Kafka.Protobuf;
+using AWS.Lambda.Powertools.Logging;
+using Com.Example;
+
+string Handler(ConsumerRecords<string, CustomerProfile> records, ILambdaContext context)
+{
+ foreach (var record in records)
+ {
+ Logger.LogInformation("Record Value: {@record}", record.Value);
+ }
+
+ return "Processed " + records.Count() + " records";
+}
+
+await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+
diff --git a/examples/Kafka/Protobuf/src/Protobuf.csproj b/examples/Kafka/Protobuf/src/Protobuf.csproj
new file mode 100644
index 000000000..858ccfb49
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/Protobuf.csproj
@@ -0,0 +1,44 @@
+
+
+ Exe
+ net8.0
+ enable
+ enable
+ true
+ Lambda
+
+ true
+
+ true
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+ PreserveNewest
+
+
+
+
+ Client
+ Public
+ True
+
+ True
+ obj\Debug/net8.0/
+ MSBuild:Compile
+ PreserveNewest
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/Readme.md b/examples/Kafka/Protobuf/src/Readme.md
new file mode 100644
index 000000000..886bbffa1
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/Readme.md
@@ -0,0 +1,133 @@
+# AWS Powertools for AWS Lambda .NET - Kafka Protobuf Example
+
+This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics.
+
+## Overview
+
+This example showcases a Lambda function that consumes messages from Kafka topics in the Protocol Buffers serialization format.
+
+It uses the `AWS.Lambda.Powertools.Kafka.Protobuf` NuGet package to easily deserialize and process Kafka records.
+
+## Project Structure
+
+```bash
+examples/Kafka/Protobuf/src/
+├── Function.cs # Entry point for the Lambda function
+├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment
+├── template.yaml # AWS SAM template for deploying the function
+├── CustomerProfile.proto # Protocol Buffers definition file for the data structure used in the Kafka messages
+└── kafka-protobuf-event.json # Sample Protocol Buffers event to test the function
+```
+
+## Prerequisites
+
+- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later)
+- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html)
+- [AWS CLI](https://aws.amazon.com/cli/)
+- An AWS account with appropriate permissions
+- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from
+- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) NuGet package installed in your project
+
+## Installation
+
+1. Clone the repository:
+
+ ```bash
+ git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git
+ ```
+
+2. Navigate to the project directory:
+
+ ```bash
+ cd powertools-lambda-dotnet/examples/Kafka/Protobuf/src
+ ```
+
+3. Build the project:
+
+ ```bash
+ dotnet build
+ ```
+
+## Deployment
+
+Deploy the application using the AWS SAM CLI:
+
+```bash
+sam build
+sam deploy --guided
+```
+
+Follow the prompts to configure your deployment.
+
+## Protocol Buffers Format
+
+The Protobuf example handles messages serialized with Protocol Buffers. The schema is defined in a `.proto` file (which would need to be created), and the C# code is generated from that schema.
+
+This requires the `Grpc.Tools` package to deserialize the messages correctly.
+
+And update the `.csproj` file to include the `.proto` files.
+
+```xml
+
+ Client
+ Public
+ True
+ True
+ obj\Debug/net8.0/
+ MSBuild:Compile
+ PreserveNewest
+
+```
+
+## Usage Examples
+
+Once deployed, you can test the Lambda function by sending a sample Protocol Buffers event to the configured Kafka topic.
+You can use the `kafka-protobuf-event.json` file as a sample event to test the function.
+
+### Testing
+
+You can test the function locally using the AWS SAM CLI (Requires Docker to be installed):
+
+```bash
+sam local invoke ProtobufDeserializationFunction --event kafka-protobuf-event.json
+```
+
+This command simulates an invocation of the Lambda function with the provided event data.
+
+## How It Works
+
+1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source.
+2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format.
+3. **Processing**: Each record is processed within the handler function.
+
+## Event Deserialization
+
+Pass the `PowertoolsKafkaProtobufSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable Protobuf deserialization of Kafka records:
+
+```csharp
+await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler,
+ new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization
+ .Build()
+ .RunAsync();
+ ```
+
+## Configuration
+
+The SAM template (`template.yaml`) defines one Lambda function:
+
+- **ProtobufDeserializationFunction**: Handles Protobuf-formatted Kafka messages
+
+## Customization
+
+To customize the examples:
+
+1. Modify the schema definitions to match your data structures
+2. Update the handler logic to process the records according to your requirements
+3. Ensure you have the proper `.proto` files and that they are included in your project for Protocol Buffers serialization/deserialization.
+
+## Resources
+
+- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/)
+- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/)
+- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/)
+- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers)
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json
new file mode 100644
index 000000000..1a1c5de1d
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json
@@ -0,0 +1,15 @@
+{
+ "Information": [
+ "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.",
+ "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.",
+ "dotnet lambda help",
+ "All the command line options for the Lambda command can be specified in this file."
+ ],
+ "profile": "",
+ "region": "",
+ "configuration": "Release",
+ "function-runtime": "dotnet8",
+ "function-memory-size": 512,
+ "function-timeout": 30,
+ "function-handler": "Protobuf"
+}
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/kafka-protobuf-event.json b/examples/Kafka/Protobuf/src/kafka-protobuf-event.json
new file mode 100644
index 000000000..6731ceb40
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/kafka-protobuf-event.json
@@ -0,0 +1,23 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "customer-topic-0": [
+ {
+ "topic": "customer-topic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "dXNlcl85NzU0",
+ "value": "Cgl1c2VyXzk3NTQSDlVzZXIgdXNlcl85NzU0GhgKFHVzZXJfOTc1NEBpY2xvdWQuY29tGAEgNSooCgw5MzQwIE1haW4gU3QSCFNhbiBKb3NlGgJDQSIDVVNBKgUzOTU5NjIQCgwyNDQtNDA3LTg4NzEQAToUCghsYW5ndWFnZRIIZGlzYWJsZWQ6FQoNbm90aWZpY2F0aW9ucxIEZGFyazoTCgh0aW1lem9uZRIHZW5hYmxlZEAC",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/template.yaml b/examples/Kafka/Protobuf/src/template.yaml
new file mode 100644
index 000000000..b8f7df6a5
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/template.yaml
@@ -0,0 +1,27 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ kafka
+
+ Sample SAM Template for kafka
+
+# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
+Globals:
+ Function:
+ Timeout: 15
+ MemorySize: 512
+ Runtime: dotnet8
+
+Resources:
+ ProtobufDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: Protobuf
+ Architectures:
+ - x86_64
+ Tracing: Active
+ Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_LOG_LEVEL: Info
+ POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default)
\ No newline at end of file
diff --git a/examples/Logging/src/HelloWorld/HelloWorld.csproj b/examples/Logging/src/HelloWorld/HelloWorld.csproj
index 21b606a68..36e8ed0df 100644
--- a/examples/Logging/src/HelloWorld/HelloWorld.csproj
+++ b/examples/Logging/src/HelloWorld/HelloWorld.csproj
@@ -5,9 +5,9 @@
enable
-
+
-
+
diff --git a/examples/Logging/test/HelloWorld.Test/HelloWorld.Tests.csproj b/examples/Logging/test/HelloWorld.Test/HelloWorld.Tests.csproj
index 446d7f284..14917e4cb 100644
--- a/examples/Logging/test/HelloWorld.Test/HelloWorld.Tests.csproj
+++ b/examples/Logging/test/HelloWorld.Test/HelloWorld.Tests.csproj
@@ -3,9 +3,9 @@
net6.0;net8.0
-
+
-
+
diff --git a/examples/Metrics/src/HelloWorld/HelloWorld.csproj b/examples/Metrics/src/HelloWorld/HelloWorld.csproj
index b914377b9..dc82111b9 100644
--- a/examples/Metrics/src/HelloWorld/HelloWorld.csproj
+++ b/examples/Metrics/src/HelloWorld/HelloWorld.csproj
@@ -5,9 +5,9 @@
enable
-
+
-
+
diff --git a/examples/Metrics/test/HelloWorld.Test/HelloWorld.Tests.csproj b/examples/Metrics/test/HelloWorld.Test/HelloWorld.Tests.csproj
index 446d7f284..14917e4cb 100644
--- a/examples/Metrics/test/HelloWorld.Test/HelloWorld.Tests.csproj
+++ b/examples/Metrics/test/HelloWorld.Test/HelloWorld.Tests.csproj
@@ -3,9 +3,9 @@
net6.0;net8.0
-
+
-
+
diff --git a/examples/Parameters/cfn/HelloWorld.Cfn/HelloWorld.Cfn.csproj b/examples/Parameters/cfn/HelloWorld.Cfn/HelloWorld.Cfn.csproj
index 713914f28..cf97597d8 100644
--- a/examples/Parameters/cfn/HelloWorld.Cfn/HelloWorld.Cfn.csproj
+++ b/examples/Parameters/cfn/HelloWorld.Cfn/HelloWorld.Cfn.csproj
@@ -6,8 +6,8 @@
HelloWorld.Cfn
-
-
+
+
diff --git a/examples/Parameters/src/HelloWorld/HelloWorld.csproj b/examples/Parameters/src/HelloWorld/HelloWorld.csproj
index 6b29f4253..99b13a66e 100644
--- a/examples/Parameters/src/HelloWorld/HelloWorld.csproj
+++ b/examples/Parameters/src/HelloWorld/HelloWorld.csproj
@@ -5,9 +5,9 @@
enable
-
+
-
+
diff --git a/examples/Parameters/test/HelloWorld.Test/HelloWorld.Tests.csproj b/examples/Parameters/test/HelloWorld.Test/HelloWorld.Tests.csproj
index 9b17d57f0..589c8306c 100644
--- a/examples/Parameters/test/HelloWorld.Test/HelloWorld.Tests.csproj
+++ b/examples/Parameters/test/HelloWorld.Test/HelloWorld.Tests.csproj
@@ -3,9 +3,9 @@
net6.0;net8.0
-
+
-
+
diff --git a/examples/ServerlessApi/test/LambdaPowertoolsAPI.Tests/LambdaPowertoolsAPI.Tests.csproj b/examples/ServerlessApi/test/LambdaPowertoolsAPI.Tests/LambdaPowertoolsAPI.Tests.csproj
index d9cdaef49..edfda0a56 100644
--- a/examples/ServerlessApi/test/LambdaPowertoolsAPI.Tests/LambdaPowertoolsAPI.Tests.csproj
+++ b/examples/ServerlessApi/test/LambdaPowertoolsAPI.Tests/LambdaPowertoolsAPI.Tests.csproj
@@ -16,7 +16,7 @@
-
+
diff --git a/examples/Tracing/src/HelloWorld/HelloWorld.csproj b/examples/Tracing/src/HelloWorld/HelloWorld.csproj
index af0b24353..f6c4873c8 100644
--- a/examples/Tracing/src/HelloWorld/HelloWorld.csproj
+++ b/examples/Tracing/src/HelloWorld/HelloWorld.csproj
@@ -5,9 +5,9 @@
enable
-
+
-
+
diff --git a/examples/Tracing/test/HelloWorld.Test/HelloWorld.Tests.csproj b/examples/Tracing/test/HelloWorld.Test/HelloWorld.Tests.csproj
index 446d7f284..14917e4cb 100644
--- a/examples/Tracing/test/HelloWorld.Test/HelloWorld.Tests.csproj
+++ b/examples/Tracing/test/HelloWorld.Test/HelloWorld.Tests.csproj
@@ -3,9 +3,9 @@
net6.0;net8.0
-
+
-
+
diff --git a/examples/examples.sln b/examples/examples.sln
index 10ec48509..6b9fa877a 100644
--- a/examples/examples.sln
+++ b/examples/examples.sln
@@ -109,6 +109,16 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging", "AOT\AOT_Logg
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging.Tests", "AOT\AOT_Logging\test\AOT_Logging.Tests\AOT_Logging.Tests.csproj", "{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}"
EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Kafka", "Kafka", "{71027B81-CA39-498C-9A50-ADDAFA2AC2F5}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Json", "Kafka\Json\src\Json.csproj", "{58EC305E-353A-4996-A541-3CF7FC0EDD80}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Protobuf", "Kafka\Protobuf\src\Protobuf.csproj", "{853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro", "Kafka\Avro\src\Avro.csproj", "{B03F22B2-315C-429B-9CC0-C15BE94CBF77}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProtoBufClassLibrary", "Kafka\JsonClassLibrary\src\ProtoBufClassLibrary.csproj", "{B6B3136D-B739-4917-AD3D-30F19FE12D3F}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -202,6 +212,22 @@ Global
{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.Build.0 = Release|Any CPU
+ {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.Build.0 = Release|Any CPU
+ {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{0CC66DBC-C1DF-4AF6-8EEB-FFED6C578BF4} = {526F1EF7-5A9C-4BFF-ABAE-75992ACD8F78}
@@ -249,5 +275,9 @@ Global
{343CF6B9-C006-43F8-924C-BF5BF5B6D051} = {FE1CAA26-87E9-4B71-800E-81D2997A7B53}
{FC02CF45-DE15-4413-958A-D86808B99146} = {FEE72EAB-494F-403B-A75A-825E713C3D43}
{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5} = {F3480212-EE7F-46FE-9ED5-24ACAB5B681D}
+ {58EC305E-353A-4996-A541-3CF7FC0EDD80} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5}
+ {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5}
+ {B03F22B2-315C-429B-9CC0-C15BE94CBF77} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5}
+ {B6B3136D-B739-4917-AD3D-30F19FE12D3F} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5}
EndGlobalSection
EndGlobal
diff --git a/libraries/AWS.Lambda.Powertools.sln b/libraries/AWS.Lambda.Powertools.sln
index c0dc580fb..325c683e0 100644
--- a/libraries/AWS.Lambda.Powertools.sln
+++ b/libraries/AWS.Lambda.Powertools.sln
@@ -103,6 +103,26 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Metri
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Metrics", "Metrics", "{A566F2D7-F8FE-466A-8306-85F266B7E656}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT-Function-ILogger", "tests\e2e\functions\core\logging\AOT-Function-ILogger\src\AOT-Function-ILogger\AOT-Function-ILogger.csproj", "{7FC6DD65-0352-4139-8D08-B25C0A0403E3}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.EventHandler.Tests", "tests\AWS.Lambda.Powertools.EventHandler.Tests\AWS.Lambda.Powertools.EventHandler.Tests.csproj", "{61374D8E-F77C-4A31-AE07-35DAF1847369}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.EventHandler", "src\AWS.Lambda.Powertools.EventHandler\AWS.Lambda.Powertools.EventHandler.csproj", "{F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction", "src\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.csproj", "{281F7EB5-ACE5-458F-BC88-46A8899DF3BA}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore", "src\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore.csproj", "{8A22F22E-D10A-4897-A89A-DC76C267F6BB}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka", "src\AWS.Lambda.Powertools.Kafka\AWS.Lambda.Powertools.Kafka.csproj", "{5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Tests", "tests\AWS.Lambda.Powertools.Kafka.Tests\AWS.Lambda.Powertools.Kafka.Tests.csproj", "{FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Avro", "src\AWS.Lambda.Powertools.Kafka.Avro\AWS.Lambda.Powertools.Kafka.Avro.csproj", "{25F0929B-2E04-4ED6-A0ED-5379A0A755B0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Json", "src\AWS.Lambda.Powertools.Kafka.Json\AWS.Lambda.Powertools.Kafka.Json.csproj", "{9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Protobuf", "src\AWS.Lambda.Powertools.Kafka.Protobuf\AWS.Lambda.Powertools.Kafka.Protobuf.csproj", "{B640DB80-C982-407B-A2EC-CD29AC77DDB8}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -548,6 +568,126 @@ Global
{F8F80477-1EAD-4C5C-A329-CBC0A60C7CAB}.Release|x64.Build.0 = Release|Any CPU
{F8F80477-1EAD-4C5C-A329-CBC0A60C7CAB}.Release|x86.ActiveCfg = Release|Any CPU
{F8F80477-1EAD-4C5C-A329-CBC0A60C7CAB}.Release|x86.Build.0 = Release|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Debug|x64.Build.0 = Debug|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Debug|x86.Build.0 = Debug|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Release|Any CPU.Build.0 = Release|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Release|x64.ActiveCfg = Release|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Release|x64.Build.0 = Release|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Release|x86.ActiveCfg = Release|Any CPU
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3}.Release|x86.Build.0 = Release|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Debug|x64.Build.0 = Debug|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Debug|x86.Build.0 = Debug|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Release|Any CPU.Build.0 = Release|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Release|x64.ActiveCfg = Release|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Release|x64.Build.0 = Release|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Release|x86.ActiveCfg = Release|Any CPU
+ {61374D8E-F77C-4A31-AE07-35DAF1847369}.Release|x86.Build.0 = Release|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Debug|x64.Build.0 = Debug|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Debug|x86.Build.0 = Debug|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Release|x64.ActiveCfg = Release|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Release|x64.Build.0 = Release|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Release|x86.ActiveCfg = Release|Any CPU
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE}.Release|x86.Build.0 = Release|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Debug|x64.Build.0 = Debug|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Debug|x86.Build.0 = Debug|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Release|Any CPU.Build.0 = Release|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Release|x64.ActiveCfg = Release|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Release|x64.Build.0 = Release|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Release|x86.ActiveCfg = Release|Any CPU
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA}.Release|x86.Build.0 = Release|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Debug|x64.Build.0 = Debug|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Debug|x86.Build.0 = Debug|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x64.ActiveCfg = Release|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x64.Build.0 = Release|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x86.ActiveCfg = Release|Any CPU
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x86.Build.0 = Release|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x64.Build.0 = Debug|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x86.Build.0 = Debug|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x64.ActiveCfg = Release|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x64.Build.0 = Release|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x86.ActiveCfg = Release|Any CPU
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x86.Build.0 = Release|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x64.Build.0 = Debug|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x86.Build.0 = Debug|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.ActiveCfg = Release|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.Build.0 = Release|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.ActiveCfg = Release|Any CPU
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.Build.0 = Release|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x64.Build.0 = Debug|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x86.Build.0 = Debug|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|Any CPU.Build.0 = Release|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x64.ActiveCfg = Release|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x64.Build.0 = Release|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x86.ActiveCfg = Release|Any CPU
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x86.Build.0 = Release|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x64.Build.0 = Debug|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x86.Build.0 = Debug|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x64.ActiveCfg = Release|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x64.Build.0 = Release|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x86.ActiveCfg = Release|Any CPU
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x86.Build.0 = Release|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x64.Build.0 = Debug|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x86.Build.0 = Debug|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x64.ActiveCfg = Release|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x64.Build.0 = Release|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x86.ActiveCfg = Release|Any CPU
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
@@ -596,5 +736,15 @@ Global
{A566F2D7-F8FE-466A-8306-85F266B7E656} = {1CFF5568-8486-475F-81F6-06105C437528}
{F8F80477-1EAD-4C5C-A329-CBC0A60C7CAB} = {A566F2D7-F8FE-466A-8306-85F266B7E656}
{A422C742-2CF9-409D-BDAE-15825AB62113} = {A566F2D7-F8FE-466A-8306-85F266B7E656}
+ {7FC6DD65-0352-4139-8D08-B25C0A0403E3} = {4EAB66F9-C9CB-4E8A-BEE6-A14CD7FDE02F}
+ {61374D8E-F77C-4A31-AE07-35DAF1847369} = {1CFF5568-8486-475F-81F6-06105C437528}
+ {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
+ {281F7EB5-ACE5-458F-BC88-46A8899DF3BA} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
+ {8A22F22E-D10A-4897-A89A-DC76C267F6BB} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
+ {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
+ {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645} = {1CFF5568-8486-475F-81F6-06105C437528}
+ {25F0929B-2E04-4ED6-A0ED-5379A0A755B0} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
+ {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
+ {B640DB80-C982-407B-A2EC-CD29AC77DDB8} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5}
EndGlobalSection
EndGlobal
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessor.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessor.cs
index ba3c5f3fc..6afeebfa6 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessor.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessor.cs
@@ -1,19 +1,4 @@
-īģŋ/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using System;
+īģŋusing System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessorAttribute.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessorAttribute.cs
index d693d4ec7..f2782e980 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessorAttribute.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/BatchProcessorAttribute.cs
@@ -15,6 +15,7 @@
using System;
using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
@@ -141,21 +142,25 @@ public class BatchProcessorAttribute : UniversalWrapperAttribute
///
/// Type of batch processor.
///
+ [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
public Type BatchProcessor { get; set; }
///
/// Type of batch processor provider.
///
+ [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
public Type BatchProcessorProvider { get; set; }
///
/// Type of record handler.
///
+ [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
public Type RecordHandler { get; set; }
///
/// Type of record handler provider.
///
+ [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
public Type RecordHandlerProvider { get; set; }
///
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/DynamoDb/IDynamoDbStreamRecordHandler.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/DynamoDb/IDynamoDbStreamRecordHandler.cs
index ed24545d4..1d910daaa 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/DynamoDb/IDynamoDbStreamRecordHandler.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/DynamoDb/IDynamoDbStreamRecordHandler.cs
@@ -1,18 +1,3 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
using Amazon.Lambda.DynamoDBEvents;
namespace AWS.Lambda.Powertools.BatchProcessing.DynamoDb;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventBatchProcessor.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventBatchProcessor.cs
index 097a1fcaf..d911e6e37 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventBatchProcessor.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventBatchProcessor.cs
@@ -1,19 +1,4 @@
-īģŋ/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using Amazon.Lambda.KinesisEvents;
+īģŋusing Amazon.Lambda.KinesisEvents;
namespace AWS.Lambda.Powertools.BatchProcessing.Kinesis;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventRecordHandler.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventRecordHandler.cs
index 5a21afc43..37def333d 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventRecordHandler.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/IKinesisEventRecordHandler.cs
@@ -1,18 +1,3 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
using Amazon.Lambda.KinesisEvents;
namespace AWS.Lambda.Powertools.BatchProcessing.Kinesis;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/KinesisEventBatchProcessor.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/KinesisEventBatchProcessor.cs
index 1ea01041c..6c3323080 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/KinesisEventBatchProcessor.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Kinesis/KinesisEventBatchProcessor.cs
@@ -1,19 +1,4 @@
-īģŋ/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using System.Collections.Generic;
+īģŋusing System.Collections.Generic;
using Amazon.Lambda.KinesisEvents;
using AWS.Lambda.Powertools.Common;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsBatchProcessor.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsBatchProcessor.cs
index 72e933af1..8fb6021be 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsBatchProcessor.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsBatchProcessor.cs
@@ -1,19 +1,4 @@
-īģŋ/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using Amazon.Lambda.SQSEvents;
+īģŋusing Amazon.Lambda.SQSEvents;
namespace AWS.Lambda.Powertools.BatchProcessing.Sqs;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsRecordHandler.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsRecordHandler.cs
index 67213a158..232c7ff84 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsRecordHandler.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/ISqsRecordHandler.cs
@@ -1,18 +1,3 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
using Amazon.Lambda.SQSEvents;
namespace AWS.Lambda.Powertools.BatchProcessing.Sqs;
diff --git a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/SqsBatchProcessor.cs b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/SqsBatchProcessor.cs
index bf191f9c9..f7741e522 100644
--- a/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/SqsBatchProcessor.cs
+++ b/libraries/src/AWS.Lambda.Powertools.BatchProcessing/Sqs/SqsBatchProcessor.cs
@@ -1,19 +1,4 @@
-īģŋ/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using System;
+īģŋusing System;
using System.Collections.Generic;
using System.Linq;
using Amazon.Lambda.SQSEvents;
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Aspects/UniversalWrapperAspect.cs b/libraries/src/AWS.Lambda.Powertools.Common/Aspects/UniversalWrapperAspect.cs
index c4b014682..0656a8bd3 100644
--- a/libraries/src/AWS.Lambda.Powertools.Common/Aspects/UniversalWrapperAspect.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Aspects/UniversalWrapperAspect.cs
@@ -1,18 +1,3 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/ConsoleWrapper.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/ConsoleWrapper.cs
index 873211404..4a124d94d 100644
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/ConsoleWrapper.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Core/ConsoleWrapper.cs
@@ -1,31 +1,160 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
using System;
+using System.IO;
namespace AWS.Lambda.Powertools.Common;
///
public class ConsoleWrapper : IConsoleWrapper
{
+ private static bool _override;
+ private static TextWriter _testOutputStream;
+ private static bool _inTestMode = false;
+
///
- public void WriteLine(string message) => Console.WriteLine(message);
- ///
- public void Debug(string message) => System.Diagnostics.Debug.WriteLine(message);
+ public void WriteLine(string message)
+ {
+ if (_inTestMode && _testOutputStream != null)
+ {
+ _testOutputStream.WriteLine(message);
+ }
+ else
+ {
+ EnsureConsoleOutput();
+ Console.WriteLine(message);
+ }
+ }
+
///
- public void Error(string message) => Console.Error.WriteLine(message);
+ public void Debug(string message)
+ {
+ if (_inTestMode && _testOutputStream != null)
+ {
+ _testOutputStream.WriteLine(message);
+ }
+ else
+ {
+ EnsureConsoleOutput();
+ System.Diagnostics.Debug.WriteLine(message);
+ }
+ }
+
///
- public string ReadLine() => Console.ReadLine();
+ public void Error(string message)
+ {
+ if (_inTestMode && _testOutputStream != null)
+ {
+ _testOutputStream.WriteLine(message);
+ }
+ else
+ {
+ if (!_override)
+ {
+ var errorOutput = new StreamWriter(Console.OpenStandardError());
+ errorOutput.AutoFlush = true;
+ Console.SetError(errorOutput);
+ }
+ Console.Error.WriteLine(message);
+ }
+ }
+
+ ///
+ /// Set the ConsoleWrapper to use a different TextWriter
+ /// This is useful for unit tests where you want to capture the output
+ ///
+ public static void SetOut(TextWriter consoleOut)
+ {
+ _testOutputStream = consoleOut;
+ _inTestMode = true;
+ _override = true;
+ Console.SetOut(consoleOut);
+ }
+
+ private static void EnsureConsoleOutput()
+ {
+ // Check if we need to override console output for Lambda environment
+ if (ShouldOverrideConsole())
+ {
+ OverrideLambdaLogger();
+ }
+ }
+
+ private static bool ShouldOverrideConsole()
+ {
+ // Don't override if we're in test mode
+ if (_inTestMode) return false;
+
+ // Always override in Lambda environment to prevent Lambda's log wrapping
+ var isLambda = !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("AWS_LAMBDA_FUNCTION_NAME"));
+
+ return isLambda && (!_override || HasLambdaReInterceptedConsole());
+ }
+
+ internal static bool HasLambdaReInterceptedConsole()
+ {
+ return HasLambdaReInterceptedConsole(() => Console.Out);
+ }
+
+ internal static bool HasLambdaReInterceptedConsole(Func consoleOutAccessor)
+ {
+ // Lambda might re-intercept console between init and handler execution
+ try
+ {
+ var currentOut = consoleOutAccessor();
+ // Check if current output stream looks like it might be Lambda's wrapper
+ var typeName = currentOut.GetType().FullName ?? "";
+ return typeName.Contains("Lambda") || typeName == "System.IO.TextWriter+SyncTextWriter";
+ }
+ catch
+ {
+ return true; // Assume re-interception if we can't determine
+ }
+ }
+
+ internal static void OverrideLambdaLogger()
+ {
+ OverrideLambdaLogger(() => Console.OpenStandardOutput());
+ }
+
+ internal static void OverrideLambdaLogger(Func standardOutputOpener)
+ {
+ try
+ {
+ // Force override of LambdaLogger
+ var standardOutput = new StreamWriter(standardOutputOpener())
+ {
+ AutoFlush = true
+ };
+ Console.SetOut(standardOutput);
+ _override = true;
+ }
+ catch (Exception)
+ {
+ // Log the failure but don't throw - degraded functionality is better than crash
+ _override = false;
+ }
+ }
+
+ internal static void WriteLine(string logLevel, string message)
+ {
+ Console.WriteLine($"{DateTime.UtcNow:yyyy-MM-ddTHH:mm:ss.fffZ}\t{logLevel}\t{message}");
+ }
+
+ ///
+ /// Reset the ConsoleWrapper to its original state
+ ///
+ public static void ResetForTest()
+ {
+ _override = false;
+ _inTestMode = false;
+ _testOutputStream = null;
+ }
+
+ ///
+ /// Clear the output reset flag
+ ///
+ public static void ClearOutputResetFlag()
+ {
+ // This method is kept for backward compatibility but no longer needed
+ // since we removed the _outputResetPerformed flag
+ }
}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/IConsoleWrapper.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/IConsoleWrapper.cs
index de75020ea..9c4f1db14 100644
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/IConsoleWrapper.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Core/IConsoleWrapper.cs
@@ -1,18 +1,3 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
namespace AWS.Lambda.Powertools.Common;
///
@@ -37,10 +22,4 @@ public interface IConsoleWrapper
///
/// The error message to write.
void Error(string message);
-
- ///
- /// Reads the next line of characters from the standard input stream.
- ///
- /// The next line of characters from the input stream, or null if no more lines are available.
- string ReadLine();
}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/IPowertoolsEnvironment.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/IPowertoolsEnvironment.cs
index 059cfb7e0..6f57aabb3 100644
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/IPowertoolsEnvironment.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Core/IPowertoolsEnvironment.cs
@@ -34,4 +34,10 @@ public interface IPowertoolsEnvironment
///
/// Assembly Version in the Major.Minor.Build format
string GetAssemblyVersion(T type);
+
+ ///
+ /// Sets the execution Environment Variable (AWS_EXECUTION_ENV)
+ ///
+ ///
+ void SetExecutionEnvironment(T type);
}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/ISystemWrapper.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/ISystemWrapper.cs
deleted file mode 100644
index a873dcfbd..000000000
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/ISystemWrapper.cs
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using System.IO;
-
-namespace AWS.Lambda.Powertools.Common;
-
-///
-/// Interface ISystemWrapper
-///
-public interface ISystemWrapper
-{
- ///
- /// Gets the environment variable.
- ///
- /// The variable.
- /// System.String.
- string GetEnvironmentVariable(string variable);
-
- ///
- /// Logs the specified value.
- ///
- /// The value.
- void Log(string value);
-
- ///
- /// Logs the line.
- ///
- /// The value.
- void LogLine(string value);
-
- ///
- /// Gets random number
- ///
- /// System.Double.
- double GetRandom();
-
- ///
- /// Sets the environment variable.
- ///
- /// The variable.
- ///
- void SetEnvironmentVariable(string variable, string value);
-
- ///
- /// Sets the execution Environment Variable (AWS_EXECUTION_ENV)
- ///
- ///
- void SetExecutionEnvironment(T type);
-}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsConfigurations.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsConfigurations.cs
index e57bb42ee..e6b6f6446 100644
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsConfigurations.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsConfigurations.cs
@@ -1,18 +1,3 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
using System.Globalization;
using AWS.Lambda.Powertools.Common.Core;
@@ -25,6 +10,8 @@ namespace AWS.Lambda.Powertools.Common;
///
public class PowertoolsConfigurations : IPowertoolsConfigurations
{
+ private readonly IPowertoolsEnvironment _powertoolsEnvironment;
+
///
/// The maximum dimensions
///
@@ -40,18 +27,13 @@ public class PowertoolsConfigurations : IPowertoolsConfigurations
///
private static IPowertoolsConfigurations _instance;
- ///
- /// The system wrapper
- ///
- private readonly ISystemWrapper _systemWrapper;
-
///
/// Initializes a new instance of the class.
///
- /// The system wrapper.
- internal PowertoolsConfigurations(ISystemWrapper systemWrapper)
+ ///
+ internal PowertoolsConfigurations(IPowertoolsEnvironment powertoolsEnvironment)
{
- _systemWrapper = systemWrapper;
+ _powertoolsEnvironment = powertoolsEnvironment;
}
///
@@ -59,7 +41,7 @@ internal PowertoolsConfigurations(ISystemWrapper systemWrapper)
///
/// The instance.
public static IPowertoolsConfigurations Instance =>
- _instance ??= new PowertoolsConfigurations(SystemWrapper.Instance);
+ _instance ??= new PowertoolsConfigurations(PowertoolsEnvironment.Instance);
///
/// Gets the environment variable.
@@ -68,7 +50,7 @@ internal PowertoolsConfigurations(ISystemWrapper systemWrapper)
/// System.String.
public string GetEnvironmentVariable(string variable)
{
- return _systemWrapper.GetEnvironmentVariable(variable);
+ return _powertoolsEnvironment.GetEnvironmentVariable(variable);
}
///
@@ -79,7 +61,7 @@ public string GetEnvironmentVariable(string variable)
/// System.String.
public string GetEnvironmentVariableOrDefault(string variable, string defaultValue)
{
- var result = _systemWrapper.GetEnvironmentVariable(variable);
+ var result = _powertoolsEnvironment.GetEnvironmentVariable(variable);
return string.IsNullOrWhiteSpace(result) ? defaultValue : result;
}
@@ -91,7 +73,7 @@ public string GetEnvironmentVariableOrDefault(string variable, string defaultVal
/// System.Int32.
public int GetEnvironmentVariableOrDefault(string variable, int defaultValue)
{
- var result = _systemWrapper.GetEnvironmentVariable(variable);
+ var result = _powertoolsEnvironment.GetEnvironmentVariable(variable);
return int.TryParse(result, out var parsedValue) ? parsedValue : defaultValue;
}
@@ -103,7 +85,7 @@ public int GetEnvironmentVariableOrDefault(string variable, int defaultValue)
/// true if XXXX, false otherwise.
public bool GetEnvironmentVariableOrDefault(string variable, bool defaultValue)
{
- return bool.TryParse(_systemWrapper.GetEnvironmentVariable(variable), out var result)
+ return bool.TryParse(_powertoolsEnvironment.GetEnvironmentVariable(variable), out var result)
? result
: defaultValue;
}
@@ -161,7 +143,8 @@ public bool GetEnvironmentVariableOrDefault(string variable, bool defaultValue)
///
/// The logger sample rate.
public double LoggerSampleRate =>
- double.TryParse(_systemWrapper.GetEnvironmentVariable(Constants.LoggerSampleRateNameEnv), NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var result)
+ double.TryParse(_powertoolsEnvironment.GetEnvironmentVariable(Constants.LoggerSampleRateNameEnv),
+ NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var result)
? result
: 0;
@@ -191,7 +174,7 @@ public bool GetEnvironmentVariableOrDefault(string variable, bool defaultValue)
///
/// true if this instance is Lambda; otherwise, false.
public bool IsLambdaEnvironment => GetEnvironmentVariable(Constants.LambdaTaskRoot) is not null;
-
+
///
/// Gets a value indicating whether [tracing is disabled].
///
@@ -202,7 +185,7 @@ public bool GetEnvironmentVariableOrDefault(string variable, bool defaultValue)
///
public void SetExecutionEnvironment(T type)
{
- _systemWrapper.SetExecutionEnvironment(type);
+ _powertoolsEnvironment.SetExecutionEnvironment(type);
}
///
@@ -210,20 +193,24 @@ public void SetExecutionEnvironment(T type)
GetEnvironmentVariableOrDefault(Constants.IdempotencyDisabledEnv, false);
///
- public string BatchProcessingErrorHandlingPolicy => GetEnvironmentVariableOrDefault(Constants.BatchErrorHandlingPolicyEnv, "DeriveFromEvent");
+ public string BatchProcessingErrorHandlingPolicy =>
+ GetEnvironmentVariableOrDefault(Constants.BatchErrorHandlingPolicyEnv, "DeriveFromEvent");
///
- public bool BatchParallelProcessingEnabled => GetEnvironmentVariableOrDefault(Constants.BatchParallelProcessingEnabled, false);
+ public bool BatchParallelProcessingEnabled =>
+ GetEnvironmentVariableOrDefault(Constants.BatchParallelProcessingEnabled, false);
///
- public int BatchProcessingMaxDegreeOfParallelism => GetEnvironmentVariableOrDefault(Constants.BatchMaxDegreeOfParallelismEnv, 1);
+ public int BatchProcessingMaxDegreeOfParallelism =>
+ GetEnvironmentVariableOrDefault(Constants.BatchMaxDegreeOfParallelismEnv, 1);
///
- public bool BatchThrowOnFullBatchFailureEnabled => GetEnvironmentVariableOrDefault(Constants.BatchThrowOnFullBatchFailureEnv, true);
+ public bool BatchThrowOnFullBatchFailureEnabled =>
+ GetEnvironmentVariableOrDefault(Constants.BatchThrowOnFullBatchFailureEnv, true);
///
public bool MetricsDisabled => GetEnvironmentVariableOrDefault(Constants.PowertoolsMetricsDisabledEnv, false);
-
+
///
public bool IsColdStart => LambdaLifecycleTracker.IsColdStart;
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsEnvironment.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsEnvironment.cs
index 3ad5317c6..afc796b6a 100644
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsEnvironment.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Core/PowertoolsEnvironment.cs
@@ -1,4 +1,6 @@
using System;
+using System.Collections.Concurrent;
+using System.Text;
namespace AWS.Lambda.Powertools.Common;
@@ -10,6 +12,16 @@ public class PowertoolsEnvironment : IPowertoolsEnvironment
///
private static IPowertoolsEnvironment _instance;
+ ///
+ /// Cached runtime environment string
+ ///
+ private static readonly string CachedRuntimeEnvironment = $"PTENV/AWS_LAMBDA_DOTNET{Environment.Version.Major}";
+
+ ///
+ /// Cache for parsed assembly names to avoid repeated string operations
+ ///
+ private static readonly ConcurrentDictionary ParsedAssemblyNameCache = new();
+
///
/// Gets the instance.
///
@@ -31,13 +43,100 @@ public void SetEnvironmentVariable(string variableName, string value)
///
public string GetAssemblyName(T type)
{
+ if (type is Type typeObject)
+ {
+ return typeObject.Assembly.GetName().Name;
+ }
+
return type.GetType().Assembly.GetName().Name;
}
///
public string GetAssemblyVersion(T type)
{
- var version = type.GetType().Assembly.GetName().Version;
+ Version version;
+
+ if (type is Type typeObject)
+ {
+ version = typeObject.Assembly.GetName().Version;
+ }
+ else
+ {
+ version = type.GetType().Assembly.GetName().Version;
+ }
+
return version != null ? $"{version.Major}.{version.Minor}.{version.Build}" : string.Empty;
}
-}
\ No newline at end of file
+
+ ///
+ public void SetExecutionEnvironment(T type)
+ {
+ const string envName = Constants.AwsExecutionEnvironmentVariableName;
+ var currentEnvValue = GetEnvironmentVariable(envName);
+ var assemblyName = ParseAssemblyName(GetAssemblyName(type));
+
+ // Check for duplication early
+ if (!string.IsNullOrEmpty(currentEnvValue) && currentEnvValue.Contains(assemblyName))
+ {
+ return;
+ }
+
+ var assemblyVersion = GetAssemblyVersion(type);
+ var newEntry = $"{assemblyName}/{assemblyVersion}";
+
+ string finalValue;
+
+ if (string.IsNullOrEmpty(currentEnvValue))
+ {
+ // First entry: "PT/Assembly/1.0.0 PTENV/AWS_LAMBDA_DOTNET8"
+ finalValue = $"{newEntry} {CachedRuntimeEnvironment}";
+ }
+ else
+ {
+ // Check if PTENV already exists in one pass
+ var containsPtenv = currentEnvValue.Contains("PTENV/");
+
+ if (containsPtenv)
+ {
+ // Just append the new entry: "existing PT/Assembly/1.0.0"
+ finalValue = $"{currentEnvValue} {newEntry}";
+ }
+ else
+ {
+ // Append new entry + PTENV: "existing PT/Assembly/1.0.0 PTENV/AWS_LAMBDA_DOTNET8"
+ finalValue = $"{currentEnvValue} {newEntry} {CachedRuntimeEnvironment}";
+ }
+ }
+
+ SetEnvironmentVariable(envName, finalValue);
+ }
+
+ ///
+ /// Parsing the name to conform with the required naming convention for the UserAgent header (PTFeature/Name/Version)
+ /// Fallback to Assembly Name on exception
+ ///
+ ///
+ ///
+ internal static string ParseAssemblyName(string assemblyName)
+ {
+ // Use cache to avoid repeated string operations
+ try
+ {
+ return ParsedAssemblyNameCache.GetOrAdd(assemblyName, name =>
+ {
+ var lastDotIndex = name.LastIndexOf('.');
+ if (lastDotIndex >= 0 && lastDotIndex < name.Length - 1)
+ {
+ var parsedName = name.Substring(lastDotIndex + 1);
+ return $"{Constants.FeatureContextIdentifier}/{parsedName}";
+ }
+
+ return $"{Constants.FeatureContextIdentifier}/{name}";
+ });
+ }
+ catch
+ {
+ return string.Empty;
+ }
+ }
+}
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Core/SystemWrapper.cs b/libraries/src/AWS.Lambda.Powertools.Common/Core/SystemWrapper.cs
deleted file mode 100644
index cec85233f..000000000
--- a/libraries/src/AWS.Lambda.Powertools.Common/Core/SystemWrapper.cs
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://aws.amazon.com/apache2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- */
-
-using System;
-using System.IO;
-using System.Text;
-
-namespace AWS.Lambda.Powertools.Common;
-
-///
-/// Class SystemWrapper.
-/// Implements the
-///
-///
-public class SystemWrapper : ISystemWrapper
-{
- private static IPowertoolsEnvironment _powertoolsEnvironment;
- private static bool _inTestMode = false;
- private static TextWriter _testOutputStream;
- private static bool _outputResetPerformed = false;
-
- ///
- /// The instance
- ///
- private static ISystemWrapper _instance;
-
- ///
- /// Prevents a default instance of the class from being created.
- ///
- public SystemWrapper(IPowertoolsEnvironment powertoolsEnvironment)
- {
- _powertoolsEnvironment = powertoolsEnvironment;
- _instance ??= this;
-
- if (!_inTestMode)
- {
- // Clear AWS SDK Console injected parameters in production only
- ResetConsoleOutput();
- }
- }
-
- ///
- /// Gets the instance.
- ///
- /// The instance.
- public static ISystemWrapper Instance => _instance ??= new SystemWrapper(PowertoolsEnvironment.Instance);
-
- ///
- /// Gets the environment variable.
- ///
- /// The variable.
- /// System.String.
- public string GetEnvironmentVariable(string variable)
- {
- return _powertoolsEnvironment.GetEnvironmentVariable(variable);
- }
-
- ///
- /// Logs the specified value.
- ///
- /// The value.
- public void Log(string value)
- {
- if (_inTestMode && _testOutputStream != null)
- {
- _testOutputStream.Write(value);
- }
- else
- {
- EnsureConsoleOutputOnce();
- Console.Write(value);
- }
- }
-
- ///
- /// Logs the line.
- ///
- /// The value.
- public void LogLine(string value)
- {
- if (_inTestMode && _testOutputStream != null)
- {
- _testOutputStream.WriteLine(value);
- }
- else
- {
- EnsureConsoleOutputOnce();
- Console.WriteLine(value);
- }
- }
-
- ///
- /// Gets random number
- ///
- /// System.Double.
- public double GetRandom()
- {
- return new Random().NextDouble();
- }
-
- ///
- public void SetEnvironmentVariable(string variable, string value)
- {
- _powertoolsEnvironment.SetEnvironmentVariable(variable, value);
- }
-
- ///
- public void SetExecutionEnvironment(T type)
- {
- const string envName = Constants.AwsExecutionEnvironmentVariableName;
- var envValue = new StringBuilder();
- var currentEnvValue = GetEnvironmentVariable(envName);
- var assemblyName = ParseAssemblyName(_powertoolsEnvironment.GetAssemblyName(type));
-
- // If there is an existing execution environment variable add the annotations package as a suffix.
- if (!string.IsNullOrEmpty(currentEnvValue))
- {
- // Avoid duplication - should not happen since the calling Instances are Singletons - defensive purposes
- if (currentEnvValue.Contains(assemblyName))
- {
- return;
- }
-
- envValue.Append($"{currentEnvValue} ");
- }
-
- var assemblyVersion = _powertoolsEnvironment.GetAssemblyVersion(type);
-
- envValue.Append($"{assemblyName}/{assemblyVersion}");
-
- SetEnvironmentVariable(envName, envValue.ToString());
- }
-
- ///
- /// Sets console output
- /// Useful for testing and checking the console output
- ///
- /// var consoleOut = new StringWriter();
- /// SystemWrapper.Instance.SetOut(consoleOut);
- ///
- ///
- /// The TextWriter instance where to write to
-
- public static void SetOut(TextWriter writeTo)
- {
- _testOutputStream = writeTo;
- _inTestMode = true;
- Console.SetOut(writeTo);
- }
-
- ///
- /// Parsing the name to conform with the required naming convention for the UserAgent header (PTFeature/Name/Version)
- /// Fallback to Assembly Name on exception
- ///
- ///
- ///
- private string ParseAssemblyName(string assemblyName)
- {
- try
- {
- var parsedName = assemblyName.Substring(assemblyName.LastIndexOf(".", StringComparison.Ordinal) + 1);
- return $"{Constants.FeatureContextIdentifier}/{parsedName}";
- }
- catch
- {
- //NOOP
- }
-
- return $"{Constants.FeatureContextIdentifier}/{assemblyName}";
- }
-
- private static void EnsureConsoleOutputOnce()
- {
- if (_outputResetPerformed) return;
- ResetConsoleOutput();
- _outputResetPerformed = true;
- }
-
- private static void ResetConsoleOutput()
- {
- var standardOutput = new StreamWriter(Console.OpenStandardOutput());
- standardOutput.AutoFlush = true;
- Console.SetOut(standardOutput);
- var errorOutput = new StreamWriter(Console.OpenStandardError());
- errorOutput.AutoFlush = true;
- Console.SetError(errorOutput);
- }
-
- public static void ClearOutputResetFlag()
- {
- _outputResetPerformed = false;
- }
-
- // For test cleanup
- internal static void ResetTestMode()
- {
- _inTestMode = false;
- _testOutputStream = null;
- }
-}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Common/Tests/TestLoggerOutput.cs b/libraries/src/AWS.Lambda.Powertools.Common/Tests/TestLoggerOutput.cs
new file mode 100644
index 000000000..b5dded35c
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.Common/Tests/TestLoggerOutput.cs
@@ -0,0 +1,49 @@
+using System.Text;
+
+namespace AWS.Lambda.Powertools.Common.Tests;
+
+///
+/// Test logger output
+///
+public class TestLoggerOutput : IConsoleWrapper
+{
+ ///
+ /// Buffer for all the log messages written to the logger.
+ ///
+ private readonly StringBuilder _outputBuffer = new();
+
+ ///
+ /// Clears the output buffer.
+ ///
+ public void Clear()
+ {
+ _outputBuffer.Clear();
+ }
+
+ ///
+ /// Output the contents of the buffer.
+ ///
+ ///
+ public override string ToString()
+ {
+ return _outputBuffer.ToString();
+ }
+
+ ///
+ public void WriteLine(string message)
+ {
+ _outputBuffer.AppendLine(message);
+ }
+
+ ///
+ public void Debug(string message)
+ {
+ _outputBuffer.AppendLine(message);
+ }
+
+ ///
+ public void Error(string message)
+ {
+ _outputBuffer.AppendLine(message);
+ }
+}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore.csproj b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore.csproj
new file mode 100644
index 000000000..5e5c66660
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore.csproj
@@ -0,0 +1,26 @@
+īģŋ
+
+
+
+ AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore
+ Powertools for AWS Lambda (.NET) - Event Handler Bedrock Agent Function Resolver AspNetCore package.
+ AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore
+ AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore
+ net8.0
+ false
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/BedrockFunctionRegistration.cs b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/BedrockFunctionRegistration.cs
new file mode 100644
index 000000000..7bc17dbdb
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/BedrockFunctionRegistration.cs
@@ -0,0 +1,41 @@
+namespace AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore;
+
+///
+/// Helper class for function registration with fluent API pattern.
+///
+internal class BedrockFunctionRegistration
+{
+ private readonly BedrockAgentFunctionResolver _resolver;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The Bedrock agent function resolver.
+ public BedrockFunctionRegistration(BedrockAgentFunctionResolver resolver)
+ {
+ _resolver = resolver;
+ }
+
+ ///
+ /// Adds a function to the Bedrock resolver.
+ ///
+ /// The name of the function.
+ /// The delegate handler.
+ /// Optional description of the function.
+ /// The function registration instance for method chaining.
+ ///
+ ///
+ /// app.MapBedrockFunction("GetWeather", (string city, int month) =>
+ /// $"Weather forecast for {city} in month {month}: Warm and sunny");
+ ///
+ /// app.MapBedrockFunction("Calculate", (int x, int y) =>
+ /// $"Result: {x + y}");
+ ///
+ ///
+ ///
+ public BedrockFunctionRegistration Add(string name, Delegate handler, string description = "")
+ {
+ _resolver.Tool(name, description, handler);
+ return this;
+ }
+}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/BedrockMinimalApiExtensions.cs b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/BedrockMinimalApiExtensions.cs
new file mode 100644
index 000000000..ca9fd9ece
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/BedrockMinimalApiExtensions.cs
@@ -0,0 +1,158 @@
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.Models;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Http;
+using Microsoft.Extensions.DependencyInjection;
+
+namespace AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore;
+
+// Source generation for JSON serialization
+[JsonSerializable(typeof(BedrockFunctionRequest))]
+internal partial class BedrockJsonContext : JsonSerializerContext
+{
+}
+
+///
+/// Extension methods for registering Bedrock Agent Functions in ASP.NET Core Minimal API.
+///
+public static class BedrockMinimalApiExtensions
+{
+ // Static flag to track if handler is mapped (thread-safe with volatile)
+ private static volatile bool _bedrockRequestHandlerMapped;
+
+ // JSON options with case insensitivity
+ private static readonly JsonSerializerOptions JsonOptions = new JsonSerializerOptions
+ {
+ PropertyNameCaseInsensitive = true
+ };
+
+ ///
+ /// Maps an individual Bedrock Agent function that will be called directly from the root endpoint.
+ /// The function name is extracted from the incoming request payload.
+ ///
+ /// The web application to configure.
+ /// The name of the function to register.
+ /// The delegate handler that implements the function.
+ /// Optional description of the function.
+ /// The web application instance.
+ ///
+ ///
+ /// // Register individual functions
+ /// app.MapBedrockFunction("GetWeather", (string city, int month) =>
+ /// $"Weather forecast for {city} in month {month}: Warm and sunny");
+ ///
+ /// app.MapBedrockFunction("Calculate", (int x, int y) =>
+ /// $"Result: {x + y}");
+ ///
+ ///
+ public static WebApplication MapBedrockFunction(
+ this WebApplication app,
+ string functionName,
+ Delegate handler,
+ string description = "")
+ {
+ // Get or create the resolver from services
+ var resolver = app.Services.GetService()
+ ?? new BedrockAgentFunctionResolver();
+
+ // Register the function with the resolver
+ resolver.Tool(functionName, description, handler);
+
+ // Ensure we have a global handler for Bedrock requests
+ EnsureBedrockRequestHandler(app, resolver);
+
+ return app;
+ }
+
+ [UnconditionalSuppressMessage("AOT", "IL3050:RequiresDynamicCode",
+ Justification = "The handler implementation is controlled and AOT-compatible")]
+ [UnconditionalSuppressMessage("Trimming", "IL2026:RequiresUnreferencedCode",
+ Justification = "The handler implementation is controlled and trim-compatible")]
+ private static void EnsureBedrockRequestHandler(WebApplication app, BedrockAgentFunctionResolver resolver)
+ {
+ // Check if we've already mapped the handler (we only need to do this once)
+ if (_bedrockRequestHandlerMapped)
+ return;
+
+ // Map the root endpoint to handle all Bedrock Agent Function requests
+ app.MapPost("/", [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Handler is AOT-friendly")]
+ [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Handler is trim-friendly")]
+ async (HttpContext context) =>
+ {
+ try
+ {
+ // Read the request body
+ string requestBody;
+ using (var reader = new StreamReader(context.Request.Body))
+ {
+ requestBody = await reader.ReadToEndAsync();
+ }
+
+ // Use source-generated serialization for the request
+ var bedrockRequest = JsonSerializer.Deserialize(requestBody,
+ BedrockJsonContext.Default.BedrockFunctionRequest);
+
+ if (bedrockRequest == null)
+ return Results.BadRequest("Invalid request format");
+
+ // Process the request through the resolver
+ var result = await resolver.ResolveAsync(bedrockRequest);
+
+ // For the response, use the standard serializer with suppressed warnings
+ // This is more compatible with different response types
+ context.Response.ContentType = "application/json";
+ await context.Response.WriteAsJsonAsync(result, JsonOptions);
+ return Results.Empty;
+ }
+ catch (Exception ex)
+ {
+ return Results.Problem($"Error processing Bedrock Agent request: {ex.Message}");
+ }
+ });
+
+ // Mark that we've set up the handler
+ _bedrockRequestHandlerMapped = true;
+ }
+
+ ///
+ /// Registers all methods from a class marked with BedrockFunctionTypeAttribute.
+ ///
+ /// The type containing tool methods marked with BedrockFunctionToolAttribute
+ /// The web application to configure.
+ /// The web application instance.
+ ///
+ ///
+ /// // Define your tool class
+ /// [BedrockFunctionType]
+ /// public class WeatherTools
+ /// {
+ /// [BedrockFunctionTool(Name = "GetWeather", Description = "Gets weather forecast")]
+ /// public static string GetWeather(string location, int days)
+ /// {
+ /// return $"Weather forecast for {location} for the next {days} days";
+ /// }
+ /// }
+ ///
+ /// // Register all tools from the class
+ /// app.MapBedrockToolType&lt;WeatherTools&gt;();
+ ///
+ ///
+ public static WebApplication MapBedrockToolType<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods)] T>(
+ this WebApplication app)
+ where T : class
+ {
+ // Get or create the resolver from services
+ var resolver = app.Services.GetService()
+ ?? new BedrockAgentFunctionResolver();
+
+ // Register the tool class
+ resolver.RegisterTool();
+
+ // Ensure we have a global handler for Bedrock requests
+ EnsureBedrockRequestHandler(app, resolver);
+
+ return app;
+ }
+}
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/Readme.md b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/Readme.md
new file mode 100644
index 000000000..8cc31365c
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore/Readme.md
@@ -0,0 +1,115 @@
+# Experimental work in progress, not yet released
+
+# AWS Lambda Powertools for .NET - Bedrock Agent Function Resolver for ASP.NET Core
+
+## Overview
+This library provides ASP.NET Core integration for the AWS Lambda Powertools Bedrock Agent Function Resolver. It enables you to easily expose Bedrock Agent functions as endpoints in your ASP.NET Core applications using a simple, fluent API.
+
+## Features
+
+- **Minimal API Integration**: Register Bedrock Agent functions using familiar ASP.NET Core Minimal API patterns
+- **AOT Compatibility**: Full support for .NET 8 AOT compilation through source generation
+- **Simple Function Registration**: Register functions with a fluent API
+- **Automatic Request Processing**: Automatic parsing of Bedrock Agent requests and formatting of responses
+- **Error Handling**: Built-in error handling for Bedrock Agent function requests
+
+## Installation
+
+Install the package via NuGet:
+
+```bash
+dotnet add package AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore
+```
+
+## Basic Usage
+
+Here's how to register Bedrock Agent functions in your ASP.NET Core application:
+
+```csharp
+using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore;
+
+var builder = WebApplication.CreateBuilder(args);
+var app = builder.Build();
+
+// Register individual functions
+app.MapBedrockFunction("GetWeather", (string city, int month) =>
+ $"Weather forecast for {city} in month {month}: Warm and sunny");
+
+app.MapBedrockFunction("Calculate", (int x, int y) =>
+ $"Result: {x + y}");
+
+app.Run();
+```
+
+When Amazon Bedrock Agent sends a request to your application, the appropriate function will be invoked with the extracted parameters, and the response will be formatted correctly for the agent.
+
+## Using with Dependency Injection
+
+Register the Bedrock resolver with dependency injection for more advanced scenarios:
+
+```csharp
+using AWS.Lambda.Powertools.EventHandler.Resolvers;
+using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Register the resolver and any other services
+builder.Services.AddBedrockResolver();
+builder.Services.AddSingleton();
+
+var app = builder.Build();
+
+// Register functions that use injected services
+app.MapBedrockFunction("GetWeatherForecast",
+ (string city, IWeatherService weatherService) =>
+ weatherService.GetForecast(city),
+ "Gets weather forecast for a city");
+
+app.Run();
+```
+
+## Advanced Usage
+
+### Function Documentation
+
+Add descriptions to your functions for better documentation:
+
+```csharp
+app.MapBedrockFunction("GetWeather",
+ (string city, int month) => $"Weather forecast for {city} in month {month}: Warm and sunny",
+ "Gets weather forecast for a specific city and month");
+```
+
+### Working with Tool Classes
+
+Use the `MapBedrockToolType<T>()` method to register all functions from a class directly:
+
+```csharp
+[BedrockFunctionType]
+public class WeatherTools
+{
+ [BedrockFunctionTool(Name = "GetWeather", Description = "Gets weather forecast")]
+ public static string GetWeather(string location, int days)
+ {
+ return $"Weather forecast for {location} for the next {days} days";
+ }
+}
+
+// In Program.cs - directly register the tool class
+app.MapBedrockToolType<WeatherTools>();
+```
+
+## How It Works
+
+1. When you call `MapBedrockFunction`, the function is registered with the resolver
+2. An HTTP endpoint is set up at the root path (/) to handle incoming Bedrock Agent requests
+3. When a request arrives, the library:
+ - Deserializes the JSON payload
+ - Extracts the function name and parameters
+ - Invokes the matching function with the appropriate parameters
+ - Serializes the result and returns it as a response
+
+## Requirements
+
+- .NET 8.0 or later
+- ASP.NET Core 8.0 or later
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.csproj b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.csproj
new file mode 100644
index 000000000..b0a7db73a
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.csproj
@@ -0,0 +1,21 @@
+īģŋ
+
+
+
+ AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction
+ Powertools for AWS Lambda (.NET) - Event Handler Bedrock Agent Function Resolver package.
+ net8.0
+ false
+ enable
+ enable
+ true
+ true
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction/BedrockAgentFunctionResolver.cs b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction/BedrockAgentFunctionResolver.cs
new file mode 100644
index 000000000..4107a1b9d
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction/BedrockAgentFunctionResolver.cs
@@ -0,0 +1,364 @@
+īģŋusing System.Text.Json.Serialization.Metadata;
+using Amazon.Lambda.Core;
+using AWS.Lambda.Powertools.Common;
+using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.Models;
+using AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.Helpers;
+
+// ReSharper disable once CheckNamespace
+namespace AWS.Lambda.Powertools.EventHandler.Resolvers
+{
+ ///
+ /// A resolver for Bedrock Agent functions that allows registering handlers for tool functions.
+ ///
+ ///
+ /// Basic usage:
+ ///
+ /// var resolver = new BedrockAgentFunctionResolver();
+ /// resolver.Tool("GetWeather", (string city) => $"Weather in {city} is sunny");
+ ///
+ /// // Lambda handler
+ /// public BedrockFunctionResponse FunctionHandler(BedrockFunctionRequest input, ILambdaContext context)
+ /// {
+ /// return resolver.Resolve(input, context);
+ /// }
+ ///
+ ///
+ public class BedrockAgentFunctionResolver
+ {
+ private readonly
+ Dictionary>
+ _handlers = new();
+
+ private readonly ParameterTypeValidator _parameterValidator = new();
+ private readonly ResultConverter _resultConverter = new();
+ private readonly ParameterMapper _parameterMapper;
+
+ ///
+ /// Initializes a new instance of the class.
+ /// Optionally accepts a type resolver for JSON serialization.
+ ///
+ public BedrockAgentFunctionResolver(IJsonTypeInfoResolver? typeResolver = null)
+ {
+ _parameterMapper = new ParameterMapper(typeResolver);
+ PowertoolsEnvironment.Instance.SetExecutionEnvironment(this);
+ }
+
+ ///
+ /// Checks if another tool can be registered, and logs a warning if the maximum limit is reached
+ /// or if a tool with the same name is already registered
+ ///
+ /// The name of the tool being registered
+ /// True if the tool can be registered, false if the maximum limit is reached
+ private bool CanRegisterTool(string name)
+ {
+ if (_handlers.ContainsKey(name))
+ {
+ Console.WriteLine($"WARNING: Tool {name} already registered. Overwriting with new definition.");
+ }
+
+ return true;
+ }
+
+ ///
+ /// Registers a handler that directly accepts BedrockFunctionRequest and returns BedrockFunctionResponse
+ ///
+ /// The name of the tool function
+ /// The handler function that accepts input and context and returns output
+ /// Optional description of the tool function
+ /// The resolver instance for method chaining
+ public BedrockAgentFunctionResolver Tool(
+ string name,
+ Func handler,
+ string description = "")
+ {
+ ArgumentNullException.ThrowIfNull(handler);
+
+ if (!CanRegisterTool(name))
+ return this;
+
+ _handlers[name] = handler;
+ return this;
+ }
+
+ ///
+ /// Registers a handler that directly accepts BedrockFunctionRequest and returns BedrockFunctionResponse
+ ///
+ /// The name of the tool function
+ /// The handler function that accepts input and returns output
+ /// Optional description of the tool function
+ /// The resolver instance for method chaining
+ public BedrockAgentFunctionResolver Tool(
+ string name,
+ Func handler,
+ string description = "")
+ {
+ ArgumentNullException.ThrowIfNull(handler);
+
+ if (!CanRegisterTool(name))
+ return this;
+
+ _handlers[name] = (input, _) => handler(input);
+ return this;
+ }
+
+ ///
+ /// Registers a parameter-less handler that returns BedrockFunctionResponse
+ ///
+ /// The name of the tool function
+ /// The handler function that returns output
+ /// Optional description of the tool function
+ /// The resolver instance for method chaining
+ public BedrockAgentFunctionResolver Tool(
+ string name,
+ Func handler,
+ string description = "")
+ {
+ ArgumentNullException.ThrowIfNull(handler);
+
+ if (!CanRegisterTool(name))
+ return this;
+
+ _handlers[name] = (_, _) => handler();
+ return this;
+ }
+
+ ///
+ /// Registers a parameter-less handler with automatic string conversion
+ ///
+ /// The name of the tool function
+ /// The handler function that returns a string
+ /// Optional description of the tool function
+ /// The resolver instance for method chaining
+ public BedrockAgentFunctionResolver Tool(
+ string name,
+ Func handler,
+ string description = "")
+ {
+ ArgumentNullException.ThrowIfNull(handler);
+
+ if (!CanRegisterTool(name))
+ return this;
+
+ _handlers[name] = (input, _) => BedrockFunctionResponse.WithText(
+ handler(),
+ input.ActionGroup,
+ name,
+ input.SessionAttributes,
+ input.PromptSessionAttributes,
+ new Dictionary());
+ return this;
+ }
+
+ ///
+ /// Registers a parameter-less handler with automatic object conversion
+ ///
+ /// The name of the tool function
+ /// The handler function that returns an object
+ /// Optional description of the tool function
+ /// The resolver instance for method chaining
+ public BedrockAgentFunctionResolver Tool(
+ string name,
+ Func