diff --git a/aws_sra_examples/solutions/genai/bedrock_org/README.md b/aws_sra_examples/solutions/genai/bedrock_org/README.md index cfcf25ee..80b005bc 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/README.md +++ b/aws_sra_examples/solutions/genai/bedrock_org/README.md @@ -7,6 +7,7 @@ - [Security Controls](#security-controls) - [JSON Parameters](#json-parameters) - [References](#references) +- [Related Security Control Solutions](#related-security-control-solutions) --- @@ -102,6 +103,11 @@ aws cloudformation create-stack \ ParameterKey=pBedrockPromptInjectionFilterParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\"], \"filter_params\": {\"log_group_name\": \"model-invocation-log-group\", \"input_path\": \"input.inputBodyJson.messages[0].content\"}}"' \ ParameterKey=pBedrockSensitiveInfoFilterParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\"], \"filter_params\": {\"log_group_name\": \"model-invocation-log-group\", \"input_path\": \"input.inputBodyJson.messages[0].content\"}}"' \ ParameterKey=pBedrockCentralObservabilityParams,ParameterValue='"{\"deploy\": \"true\", \"bedrock_accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\"]}"' \ + ParameterKey=pBedrockKBLoggingRuleParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\",\"us-west-2\"], \"input_params\": {}}"' \ + ParameterKey=pBedrockKBIngestionEncryptionRuleParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\",\"us-west-2\"], \"input_params\": {}}"' \ + ParameterKey=pBedrockKBS3BucketRuleParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\",\"us-west-2\"], \"input_params\": {\"check_retention\": \"true\", 
\"check_encryption\": \"true\", \"check_access_logging\": \"true\", \"check_object_locking\": \"true\", \"check_versioning\": \"true\"}}"' \ + ParameterKey=pBedrockKBVectorStoreSecretRuleParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\",\"us-west-2\"], \"input_params\": {}}"' \ + ParameterKey=pBedrockKBOpenSearchEncryptionRuleParams,ParameterValue='"{\"deploy\": \"true\", \"accounts\": [\"222222222222\",\"333333333333\"], \"regions\": [\"us-east-1\",\"us-west-2\"], \"input_params\": {}}"' \ --capabilities CAPABILITY_NAMED_IAM ``` @@ -139,6 +145,11 @@ Please read the following notes before deploying the stack to ensure successful | CloudWatch Endpoint Validation | Ensures proper CloudWatch VPC endpoint setup | [pBedrockCWEndpointsRuleParams](#pbedrockcwendpointsruleparams) | | S3 Endpoint Validation | Ensures proper S3 VPC endpoint setup | [pBedrockS3EndpointsRuleParams](#pbedrocks3endpointsruleparams) | | Guardrail Encryption | Validates KMS encryption for Bedrock guardrails | [pBedrockGuardrailEncryptionRuleParams](#pbedrockguardrailencryptionruleparams) | +| Knowledge Base Logging | Validates logging configuration for Bedrock Knowledge Base | [pBedrockKBLoggingRuleParams](#pbedrockkbloggingruleparams) | +| Knowledge Base Ingestion Encryption | Validates encryption for Knowledge Base data ingestion | [pBedrockKBIngestionEncryptionRuleParams](#pbedrockkbingestionencryptionruleparams) | +| Knowledge Base S3 Bucket | Validates S3 bucket configurations for Knowledge Base | [pBedrockKBS3BucketRuleParams](#pbedrockkbs3bucketruleparams) | +| Knowledge Base Vector Store Secret | Validates vector store secret configuration | [pBedrockKBVectorStoreSecretRuleParams](#pbedrockkbvectorstoresecretruleparams) | +| Knowledge Base OpenSearch Encryption | Validates OpenSearch encryption configuration | [pBedrockKBOpenSearchEncryptionRuleParams](#pbedrockkbopensearchencryptionruleparams) | > **Important 
Note**: The Config rule Lambda execution role needs to have access to any KMS keys used to encrypt Bedrock guardrails. Make sure to grant the appropriate KMS key permissions to the Lambda role to ensure proper evaluation of encrypted guardrail configurations. @@ -155,6 +166,15 @@ Please read the following notes before deploying the stack to ensure successful |-----------------|-------------|----------------| | Central Observability | Configures cross-account/region metric aggregation | [pBedrockCentralObservabilityParams](#pbedrockcentralobservabilityparams) | +### Bedrock Knowledge Base +| Security Control | Description | JSON Parameter | +|-----------------|-------------|----------------| +| KB Logging | Validates logging configuration for Bedrock Knowledge Base | [pBedrockKBLoggingRuleParams](#pbedrockkbloggingruleparams) | +| KB Ingestion Encryption | Validates encryption configuration for Bedrock Knowledge Base | [pBedrockKBIngestionEncryptionRuleParams](#pbedrockkbingestionencryptionruleparams) | +| KB S3 Bucket | Validates S3 bucket configuration for Bedrock Knowledge Base | [pBedrockKBS3BucketRuleParams](#pbedrockkbs3bucketruleparams) | +| KB Vector Store Secret | Validates secret configuration for Bedrock Knowledge Base | [pBedrockKBVectorStoreSecretRuleParams](#pbedrockkbvectorstoresecretruleparams) | +| KB OpenSearch Encryption | Validates encryption configuration for Bedrock Knowledge Base | [pBedrockKBOpenSearchEncryptionRuleParams](#pbedrockkbopensearchencryptionruleparams) | + --- ## JSON Parameters @@ -367,6 +387,72 @@ This section explains the parameters in the CloudFormation template that require } ``` +### `pBedrockKBLoggingRuleParams` +- **Purpose**: Validates logging configuration for Bedrock Knowledge Base. 
+- **Structure**: +```json +{ + "deploy": "true|false", + "accounts": ["account_id1", "account_id2"], + "regions": ["region1", "region2"], + "input_params": {} +} +``` + +### `pBedrockKBIngestionEncryptionRuleParams` +- **Purpose**: Validates encryption configuration for Bedrock Knowledge Base. +- **Structure**: +```json +{ + "deploy": "true|false", + "accounts": ["account_id1", "account_id2"], + "regions": ["region1", "region2"], + "input_params": {} +} +``` + +### `pBedrockKBS3BucketRuleParams` +- **Purpose**: Validates S3 bucket configuration for Bedrock Knowledge Base. +- **Structure**: +```json +{ + "deploy": "true|false", + "accounts": ["account_id1", "account_id2"], + "regions": ["region1", "region2"], + "input_params": { + "check_retention": "true|false", + "check_encryption": "true|false", + "check_access_logging": "true|false", + "check_object_locking": "true|false", + "check_versioning": "true|false" + } +} +``` + +### `pBedrockKBVectorStoreSecretRuleParams` +- **Purpose**: Validates secret configuration for Bedrock Knowledge Base. +- **Structure**: +```json +{ + "deploy": "true|false", + "accounts": ["account_id1", "account_id2"], + "regions": ["region1", "region2"], + "input_params": {} +} +``` + +### `pBedrockKBOpenSearchEncryptionRuleParams` +- **Purpose**: Validates encryption configuration for Bedrock Knowledge Base. 
+- **Structure**: +```json +{ + "deploy": "true|false", + "accounts": ["account_id1", "account_id2"], + "regions": ["region1", "region2"], + "input_params": {} +} +``` + --- ## References - [AWS SRA Generative AI Deep-Dive](https://docs.aws.amazon.com/prescriptive-guidance/latest/security-reference-architecture/gen-ai-sra.html) @@ -375,3 +461,32 @@ This section explains the parameters in the CloudFormation template that require - [CloudWatch Metrics and Alarms](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/WhatIsCloudWatch.html) - [AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/welcome.html) - [AWS KMS](https://docs.aws.amazon.com/kms/latest/developerguide/overview.html) + +## Related Security Control Solutions + +This solution works in conjunction with other AWS SRA solutions to provide comprehensive security controls for Bedrock GenAI environments: + +### Amazon Bedrock Guardrails Solution +The [SRA Bedrock Guardrails solution](../../genai/bedrock_guardrails/README.md) provides automated deployment of Amazon Bedrock Guardrails across your organization. It supports: + +- **Content Filters**: Block harmful content in inputs/outputs based on predefined categories (Hate, Insults, Sexual, Violence, Misconduct, Prompt Attack) +- **Denied Topics**: Define and block undesirable topics +- **Word Filters**: Block specific words, phrases, and profanity +- **Sensitive Information Filters**: Block or mask PII and sensitive data +- **Contextual Grounding**: Detect and filter hallucinations based on source grounding + +The solution uses KMS encryption for enhanced security and requires proper IAM role configurations for users who need to invoke or manage guardrails. + +### GuardDuty Malware Protection for S3 +The [SRA GuardDuty Malware Protection solution](../../guardduty/guardduty_malware_protection_for_s3/README.md) helps protect S3 buckets used in your Bedrock environment from malware. 
This is particularly important for: + +- Model evaluation job buckets +- Knowledge base data ingestion buckets +- Model invocation logging buckets + +The solution enables GuardDuty's malware scanning capabilities to detect malicious files that could be used in prompt injection attacks or compromise your GenAI applications. + +These complementary solutions work together to provide defense-in-depth for your Bedrock GenAI environment: +- This solution (SRA Bedrock Org) provides organizational security controls and monitoring +- Bedrock Guardrails solution provides content and data security controls +- GuardDuty Malware Protection ensures S3 bucket security against malware threats diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_cloudwatch_endpoints/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_cloudwatch_endpoints/app.py index 6fe35ceb..84ccd76b 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_cloudwatch_endpoints/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_cloudwatch_endpoints/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_eval_job_bucket/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_eval_job_bucket/app.py index 5abfdf2c..aed334a0 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_eval_job_bucket/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_eval_job_bucket/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: MIT-0 """ + import ast import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrail_encryption/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrail_encryption/app.py index c1113eb0..2b68f892 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrail_encryption/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrail_encryption/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrails/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrails/app.py index 72d0a726..f9c37c3c 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrails/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_guardrails/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import ast import json import logging diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_iam_user_access/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_iam_user_access/app.py index 10361ac6..1595681e 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_iam_user_access/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_iam_user_access/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_cloudwatch/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_cloudwatch/app.py index f94cc1fc..570b5cfc 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_cloudwatch/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_cloudwatch/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_s3/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_s3/app.py index a88d73e1..608afef1 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_s3/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_invocation_log_s3/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_ingestion_encryption/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_ingestion_encryption/app.py new file mode 100644 index 00000000..72b5e6d3 --- /dev/null +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_ingestion_encryption/app.py @@ -0,0 +1,151 @@ +"""Config rule to check knowledge base data ingestion encryption for Bedrock environments. 
+ +Version: 1.0 + +Config rule for SRA in the repo, https://github.com/aws-samples/aws-security-reference-architecture-examples + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +SPDX-License-Identifier: MIT-0 +""" + +import json +import logging +import os +from typing import Any + +import boto3 +from botocore.exceptions import ClientError + +# Setup Default Logger +LOGGER = logging.getLogger(__name__) +log_level = os.environ.get("LOG_LEVEL", logging.INFO) +LOGGER.setLevel(log_level) +LOGGER.info(f"boto3 version: {boto3.__version__}") + +# Get AWS region from environment variable +AWS_REGION = os.environ.get("AWS_REGION") + +# Initialize AWS clients +bedrock_agent_client = boto3.client("bedrock-agent", region_name=AWS_REGION) +config_client = boto3.client("config", region_name=AWS_REGION) + + +def check_data_sources(kb_id: str, kb_name: str) -> str | None: # type: ignore # noqa: CFQ004, CCR001 + """Check if a knowledge base's data sources are encrypted with KMS during ingestion. 
+ + Args: + kb_id (str): Knowledge base ID + kb_name (str): Knowledge base name + + Raises: + ClientError: If there is an error checking the knowledge base + + Returns: + str | None: Error message if non-compliant, None if compliant + """ + try: + data_sources = bedrock_agent_client.list_data_sources(knowledgeBaseId=kb_id) + LOGGER.info(f"Data sources: {data_sources}") + if not isinstance(data_sources, dict): + return f"{kb_name}: Invalid response" + + unencrypted_sources = [] + for source in data_sources.get("dataSourceSummaries", []): + LOGGER.info(f"Source: {source}") + if not isinstance(source, dict): + continue + + # Get the detailed data source configuration + try: + source_details = bedrock_agent_client.get_data_source(knowledgeBaseId=kb_id, dataSourceId=source["dataSourceId"]) + LOGGER.info(f"Source details: {source_details}") + + # Check for KMS encryption configuration + data_source = source_details.get("dataSource", {}) + encryption_config = data_source.get("serverSideEncryptionConfiguration", {}) + LOGGER.info(f"Encryption config: {encryption_config}") + + # Check if KMS key is configured for encryption + if not encryption_config.get("kmsKeyArn"): + unencrypted_sources.append(source.get("name", source["dataSourceId"])) + + except ClientError as e: + LOGGER.error(f"Error getting data source details for {source.get('name', source['dataSourceId'])}: {str(e)}") + if e.response["Error"]["Code"] == "AccessDeniedException": + unencrypted_sources.append(f"{source.get('name', source['dataSourceId'])}") + continue + + if unencrypted_sources: + return f"{kb_name}: {len(unencrypted_sources)} sources need CMK" + return None + except ClientError as e: + LOGGER.error(f"Error checking data sources for knowledge base {kb_name}: {str(e)}") + if e.response["Error"]["Code"] == "AccessDeniedException": + return f"{kb_name}: Access denied" + raise + + +def evaluate_compliance(rule_parameters: dict) -> tuple[str, str]: # noqa: U100 + """Evaluate if Bedrock Knowledge Base data 
sources are encrypted with KMS.
+
+    Args:
+        rule_parameters (dict): Rule parameters from AWS Config rule.
+
+    Returns:
+        tuple[str, str]: Compliance type and annotation message.
+    """
+    try:
+        non_compliant_kbs = []
+        paginator = bedrock_agent_client.get_paginator("list_knowledge_bases")
+
+        for page in paginator.paginate():
+            for kb in page["knowledgeBaseSummaries"]:
+                kb_id = kb["knowledgeBaseId"]
+                kb_name = kb.get("name", kb_id)
+                error = check_data_sources(kb_id, kb_name)
+                if error:
+                    non_compliant_kbs.append(error)
+
+        if non_compliant_kbs:
+            msg = f"KBs missing Customer Managed Keys: {'; '.join(non_compliant_kbs)}"
+            # Ensure annotation doesn't exceed 256 characters
+            if len(msg) > 256:
+                LOGGER.info(f"Full message truncated: {msg}")
+                msg = msg[:220] + " (see CloudWatch logs for details)"
+            return "NON_COMPLIANT", msg
+        return "COMPLIANT", "All KB data sources use Customer Managed Keys"
+
+    except Exception as e:
+        LOGGER.error(f"Error evaluating Bedrock Knowledge Base encryption: {str(e)}")
+        # "ERROR" is not a valid AWS Config ComplianceType; PutEvaluations accepts only
+        # COMPLIANT, NON_COMPLIANT, NOT_APPLICABLE, or INSUFFICIENT_DATA.
+        return "INSUFFICIENT_DATA", f"Error: {str(e)[:240]}"
+
+
+def lambda_handler(event: dict, context: Any) -> None:  # noqa: U100
+    """Lambda handler.
+ + Args: + event (dict): Lambda event object + context (Any): Lambda context object + """ + LOGGER.info("Evaluating compliance for AWS Config rule") + LOGGER.info(f"Event: {json.dumps(event)}") + + invoking_event = json.loads(event["invokingEvent"]) + rule_parameters = json.loads(event["ruleParameters"]) if "ruleParameters" in event else {} + + compliance_type, annotation = evaluate_compliance(rule_parameters) + + evaluation = { + "ComplianceResourceType": "AWS::::Account", + "ComplianceResourceId": event["accountId"], + "ComplianceType": compliance_type, + "Annotation": annotation, + "OrderingTimestamp": invoking_event["notificationCreationTime"], + } + + LOGGER.info(f"Compliance evaluation result: {compliance_type}") + LOGGER.info(f"Annotation: {annotation}") + + config_client.put_evaluations(Evaluations=[evaluation], ResultToken=event["resultToken"]) # type: ignore + + LOGGER.info("Compliance evaluation complete.") diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_logging/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_logging/app.py new file mode 100644 index 00000000..bbc95016 --- /dev/null +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_logging/app.py @@ -0,0 +1,206 @@ +"""Config rule to check knowledge base logging for Bedrock environments. + +Version: 1.0 + +Config rule for SRA in the repo, https://github.com/aws-samples/aws-security-reference-architecture-examples + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+SPDX-License-Identifier: MIT-0 +""" + +import json +import logging +import os +from typing import Any, Optional, Tuple + +import boto3 + +# Setup Default Logger +LOGGER = logging.getLogger(__name__) +log_level = os.environ.get("LOG_LEVEL", logging.INFO) +LOGGER.setLevel(log_level) +LOGGER.info(f"boto3 version: {boto3.__version__}") + +# Get AWS region from environment variable +AWS_REGION = os.environ.get("AWS_REGION") + +# Initialize AWS clients +bedrock_agent_client = boto3.client("bedrock-agent", region_name=AWS_REGION) +config_client = boto3.client("config", region_name=AWS_REGION) +logs_client = boto3.client("logs", region_name=AWS_REGION) +sts_client = boto3.client("sts", region_name=AWS_REGION) + +# Max length for AWS Config annotation +MAX_ANNOTATION_LENGTH = 256 + + +def truncate_annotation(message: str) -> str: + """Ensure annotation stays within AWS Config's 256 character limit. + + Args: + message (str): Original annotation message + + Returns: + str: Truncated message with CloudWatch reference if needed + """ + if len(message) <= MAX_ANNOTATION_LENGTH: + return message + + log_group = f"/aws/lambda/{os.environ.get('AWS_LAMBDA_FUNCTION_NAME', 'unknown')}" + reference = f" See CloudWatch logs ({log_group}) for details." + + # Calculate available space for the actual message + available_chars = MAX_ANNOTATION_LENGTH - len(reference) + + # Truncate message and add reference + truncated = message[: available_chars - 3] + "..." + return truncated + reference + + +def check_kb_logging(kb_id: str) -> Tuple[bool, Optional[str]]: # noqa: CCR001 + """Check if knowledge base has CloudWatch logging enabled. 
+ + Args: + kb_id (str): Knowledge base ID + + Returns: + Tuple[bool, Optional[str]]: (True if logging is enabled, destination type if found) + """ + try: + account_id = sts_client.get_caller_identity()["Account"] + kb_arn = f"arn:aws:bedrock:{AWS_REGION}:{account_id}:knowledge-base/{kb_id}" + LOGGER.info(f"Checking logging for KB ARN: {kb_arn}") + + # Get delivery sources + delivery_sources = logs_client.describe_delivery_sources() + LOGGER.info(f"Found {len(delivery_sources.get('deliverySources', []))} delivery sources") + + for source in delivery_sources.get("deliverySources", []): + LOGGER.info(f"Checking source: {source.get('name')}") + if kb_arn in source.get("resourceArns", []): + source_name = source.get("name") + LOGGER.info(f"Found matching source name: {source_name}") + if not source_name: + continue + + # Get deliveries to find the delivery ID + LOGGER.info("Calling describe_deliveries API") + deliveries = logs_client.describe_deliveries() + LOGGER.info(f"Found {len(deliveries.get('deliveries', []))} deliveries") + + for delivery in deliveries.get("deliveries", []): + LOGGER.info(f"Checking delivery: {delivery.get('id')} with source name: {delivery.get('deliverySourceName')}") + if delivery.get("deliverySourceName") == source_name: + delivery_id = delivery.get("id") + LOGGER.info(f"Found matching delivery ID: {delivery_id}") + if not delivery_id: + continue + + # Get delivery details to get the destination ARN + LOGGER.info(f"Calling get_delivery API with ID: {delivery_id}") + delivery_details = logs_client.get_delivery(id=delivery_id) + LOGGER.info(f"Delivery details: {delivery_details}") + + delivery_destination_arn = delivery_details.get("delivery", {}).get("deliveryDestinationArn") + LOGGER.info(f"Found delivery destination ARN: {delivery_destination_arn}") + if not delivery_destination_arn: + continue + + # Get delivery destinations to match the ARN + LOGGER.info("Calling describe_delivery_destinations API") + delivery_destinations = 
logs_client.describe_delivery_destinations() + LOGGER.info(f"Found {len(delivery_destinations.get('deliveryDestinations', []))} delivery destinations") + + for destination in delivery_destinations.get("deliveryDestinations", []): + LOGGER.info(f"Checking destination: {destination.get('name')} with ARN: {destination.get('arn')}") + if destination.get("arn") == delivery_destination_arn: + destination_type = destination.get("deliveryDestinationType") + LOGGER.info(f"Found matching destination with type: {destination_type}") + return True, destination_type + + LOGGER.info("No matching logging configuration found") + return False, None + + except Exception as e: + LOGGER.error(f"Error checking logging for knowledge base {kb_id}: {str(e)}") + return False, None + + +def evaluate_compliance(rule_parameters: dict) -> tuple[str, str]: # noqa: CFQ004, U100 + """Evaluate if Bedrock Knowledge Base logging is properly configured. + + Args: + rule_parameters (dict): Rule parameters from AWS Config rule. + + Returns: + tuple[str, str]: Compliance type and annotation message. 
+    """
+    try:
+        # List all knowledge bases
+        kb_list = []
+        paginator = bedrock_agent_client.get_paginator("list_knowledge_bases")
+        for page in paginator.paginate():
+            kb_list.extend(page.get("knowledgeBaseSummaries", []))
+
+        if not kb_list:
+            return "COMPLIANT", "No KBs found"
+
+        non_compliant_kbs = []
+        compliant_count = 0
+
+        # Check each knowledge base for logging configuration
+        for kb in kb_list:
+            kb_id = kb["knowledgeBaseId"]
+            kb_name = kb.get("name", "unnamed")
+
+            has_logging, destination_type = check_kb_logging(kb_id)
+            if not has_logging:
+                # Use shorter format for non-compliant KBs
+                non_compliant_kbs.append(f"{kb_id[:8]}..({kb_name[:10]})")
+            else:
+                compliant_count += 1
+                LOGGER.info(f"KB {kb_id} ({kb_name}) has logging to {destination_type}")
+
+        if non_compliant_kbs:
+            msg = f"{len(non_compliant_kbs)} KBs without logging: {', '.join(non_compliant_kbs[:5])}"
+            # Add count indicator if there are more than shown
+            if len(non_compliant_kbs) > 5:
+                msg += f" +{len(non_compliant_kbs) - 5} more"
+            return "NON_COMPLIANT", truncate_annotation(msg)
+
+        return "COMPLIANT", truncate_annotation(f"All {compliant_count} KBs have logging enabled")
+
+    except Exception as e:
+        LOGGER.error(f"Error evaluating Bedrock KB logging: {str(e)}")
+        # "ERROR" is not a valid AWS Config ComplianceType; PutEvaluations accepts only
+        # COMPLIANT, NON_COMPLIANT, NOT_APPLICABLE, or INSUFFICIENT_DATA.
+        return "INSUFFICIENT_DATA", truncate_annotation(f"Error: {str(e)}")
+
+
+def lambda_handler(event: dict, context: Any) -> None:  # noqa: U100
+    """Lambda handler.
+ + Args: + event (dict): Lambda event object + context (Any): Lambda context object + """ + LOGGER.info("Evaluating compliance for AWS Config rule") + LOGGER.info(f"Event: {json.dumps(event)}") + + invoking_event = json.loads(event["invokingEvent"]) + rule_parameters = json.loads(event["ruleParameters"]) if "ruleParameters" in event else {} + + compliance_type, annotation = evaluate_compliance(rule_parameters) + + evaluation = { + "ComplianceResourceType": "AWS::::Account", + "ComplianceResourceId": event["accountId"], + "ComplianceType": compliance_type, + "Annotation": annotation, + "OrderingTimestamp": invoking_event["notificationCreationTime"], + } + + LOGGER.info(f"Compliance evaluation result: {compliance_type}") + LOGGER.info(f"Annotation: {annotation}") + + config_client.put_evaluations(Evaluations=[evaluation], ResultToken=event["resultToken"]) # type: ignore + + LOGGER.info("Compliance evaluation complete.") diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_opensearch_encryption/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_opensearch_encryption/app.py new file mode 100644 index 00000000..e97f7df7 --- /dev/null +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_opensearch_encryption/app.py @@ -0,0 +1,243 @@ +"""Config rule to check OpenSearch vector store encryption for Bedrock Knowledge Base. + +Version: 1.0 + +Config rule for SRA in the repo, https://github.com/aws-samples/aws-security-reference-architecture-examples + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+SPDX-License-Identifier: MIT-0 +""" + +import json +import logging +import os +from typing import Any + +import boto3 +from botocore.exceptions import ClientError + +# Setup Default Logger +LOGGER = logging.getLogger(__name__) +log_level = os.environ.get("LOG_LEVEL", logging.INFO) +LOGGER.setLevel(log_level) +LOGGER.info(f"boto3 version: {boto3.__version__}") + +# Get AWS region from environment variable +AWS_REGION = os.environ.get("AWS_REGION") + +# Initialize AWS clients +bedrock_agent_client = boto3.client("bedrock-agent", region_name=AWS_REGION) +opensearch_client = boto3.client("opensearch", region_name=AWS_REGION) +opensearch_serverless_client = boto3.client("opensearchserverless", region_name=AWS_REGION) +config_client = boto3.client("config", region_name=AWS_REGION) + + +def check_opensearch_serverless(collection_id: str, kb_name: str) -> str | None: # type: ignore # noqa: CFQ004 + """Check OpenSearch Serverless collection encryption. + + Args: + collection_id (str): Collection ID + kb_name (str): Knowledge base name + + Returns: + str | None: Error message if non-compliant, None if compliant + """ + try: + # Get collection details to get the collection name + collection_response = opensearch_serverless_client.batch_get_collection(ids=[collection_id]) + LOGGER.info(f"Collection details: {json.dumps(collection_response, default=str)}") + + if not collection_response.get("collectionDetails"): + LOGGER.error(f"No collection details found for ID {collection_id}") + return f"{kb_name} (no collection)" + + collection_name = collection_response["collectionDetails"][0].get("name") + if not collection_name: + LOGGER.error(f"No collection name found for ID {collection_id}") + return f"{kb_name} (no collection name)" + + # Get the specific policy details using the collection name + policy_details = opensearch_serverless_client.get_security_policy(name=collection_name, type="encryption") + LOGGER.info(f"Policy details for {collection_name}: 
{json.dumps(policy_details, default=str)}") + + policy_details_dict = json.loads(json.dumps(policy_details, default=str)) + policy_details_dict = policy_details_dict.get("securityPolicyDetail", {}).get("policy", {}) + LOGGER.info(f"Policy details dict (after getting policy): {json.dumps(policy_details_dict, default=str)}") + + if policy_details_dict.get("AWSOwnedKey", False): + LOGGER.info(f"{kb_name} (Using AWS-owned key, not CMK)") + return f"{kb_name} (AWS-owned key)" + + kms_key_arn = policy_details_dict.get("KmsARN", "") + if not kms_key_arn: + LOGGER.info(f"{kb_name} (OpenSearch Serverless not using CMK)") + return f"{kb_name} (no CMK)" + + return None + + except ClientError as e: + LOGGER.error(f"Error checking OpenSearch Serverless collection: {str(e)}") + return f"{kb_name} (error)" + + +def check_opensearch_domain(domain_name: str, kb_name: str) -> str | None: # type: ignore # noqa: CFQ004 + """Check standard OpenSearch domain encryption. + + Args: + domain_name (str): Domain name + kb_name (str): Knowledge base name + + Returns: + str | None: Error message if non-compliant, None if compliant + """ + try: + domain = opensearch_client.describe_domain(DomainName=domain_name) + encryption_config = domain.get("DomainStatus", {}).get("EncryptionAtRestOptions", {}) + if not encryption_config.get("Enabled", False): + return f"{kb_name} (encryption disabled)" + kms_key_id = encryption_config.get("KmsKeyId", "") + if not kms_key_id or "aws/opensearch" in kms_key_id: + return f"{kb_name} (no CMK)" + except ClientError as e: + LOGGER.error(f"Error checking OpenSearch domain: {str(e)}") + return f"{kb_name} (error)" + return None + + +def check_knowledge_base(kb_id: str, kb_name: str) -> tuple[bool, str | None]: # type: ignore # noqa: CFQ004 + """Check a knowledge base's OpenSearch configuration. 
+ + Args: + kb_id (str): Knowledge base ID + kb_name (str): Knowledge base name + + Raises: + ClientError: If there is an error checking the knowledge base + + Returns: + tuple[bool, str | None]: (has_opensearch, error_message) + """ + try: + kb_details = bedrock_agent_client.get_knowledge_base(knowledgeBaseId=kb_id) + # Convert datetime objects to strings before JSON serialization + kb_details_serializable = json.loads(json.dumps(kb_details, default=str)) + LOGGER.info(f"Knowledge base details for {kb_name}: {json.dumps(kb_details_serializable)}") + + # Access the knowledgeBase key from the response + kb_data = kb_details.get("knowledgeBase", {}) + + # Check both possible locations for vector store config + vector_store = kb_data.get("vectorStoreConfiguration") or kb_data.get("storageConfiguration", {}) + LOGGER.info(f"Vector store config for {kb_name}: {json.dumps(vector_store)}") + + if not vector_store or not isinstance(vector_store, dict): + LOGGER.info(f"No vector store configuration found for {kb_name}") + return False, None + + vector_store_type = vector_store.get("vectorStoreType") or vector_store.get("type") + LOGGER.info(f"Vector store type for {kb_name}: {vector_store_type}") + if not vector_store_type or (vector_store_type.upper() != "OPENSEARCH" and vector_store_type.upper() != "OPENSEARCH_SERVERLESS"): + LOGGER.info(f"Vector store type is not OpenSearch for {kb_name}") + return False, None + + opensearch_config = vector_store.get("opensearchServerlessConfiguration") or vector_store.get("opensearchConfiguration") + LOGGER.info(f"OpenSearch config for {kb_name}: {json.dumps(opensearch_config)}") + if not opensearch_config: + return True, f"{kb_name} (missing config)" + + if "collectionArn" in opensearch_config: + collection_id = opensearch_config["collectionArn"].split("/")[-1] + LOGGER.info(f"Found OpenSearch Serverless collection {collection_id} for {kb_name}") + return True, check_opensearch_serverless(collection_id, kb_name) + + domain_endpoint = 
opensearch_config.get("endpoint", "") + if not domain_endpoint: + return True, f"{kb_name} (no endpoint)" + domain_name = domain_endpoint.split(".")[0] + LOGGER.info(f"Found OpenSearch domain {domain_name} for {kb_name}") + return True, check_opensearch_domain(domain_name, kb_name) + + except ClientError as e: + LOGGER.error(f"Error checking knowledge base {kb_id}: {str(e)}") + if e.response["Error"]["Code"] == "AccessDeniedException": + return True, f"{kb_name} (access denied)" + raise + + +def evaluate_compliance(rule_parameters: dict, request_id: str = "") -> tuple[str, str]: # noqa: U100, CFQ004 + """Evaluate if Bedrock Knowledge Base OpenSearch vector stores are encrypted with KMS CMK. + + Args: + rule_parameters (dict): Rule parameters from AWS Config rule. + request_id (str): Lambda request ID for CloudWatch log reference. + + Returns: + tuple[str, str]: Compliance type and annotation message. + """ + try: + non_compliant_kbs = [] + has_opensearch = False + paginator = bedrock_agent_client.get_paginator("list_knowledge_bases") + + for page in paginator.paginate(): + for kb in page["knowledgeBaseSummaries"]: + kb_id = kb["knowledgeBaseId"] + kb_name = kb.get("name", kb_id) + is_opensearch, error = check_knowledge_base(kb_id, kb_name) + has_opensearch = has_opensearch or is_opensearch + if error: + non_compliant_kbs.append(error) + + if not has_opensearch: + return "COMPLIANT", "No OpenSearch vector stores found" + + if non_compliant_kbs: + message = "KBs without CMK encryption: " + ", ".join(non_compliant_kbs) + # Check if message exceeds the 256-character limit + if len(message) > 256: + LOGGER.info(f"Full message (truncated in annotation): {message}") + return "NON_COMPLIANT", f"Multiple KBs without CMK encryption. 
See CloudWatch logs ({request_id})" + return "NON_COMPLIANT", message + + return "COMPLIANT", "All KBs properly encrypted with CMK" + + except Exception as e: + LOGGER.error(f"Error evaluating Bedrock Knowledge Base OpenSearch encryption: {str(e)}") + return "INSUFFICIENT_DATA", f"Error: {str(e)[:220]}" + + +def lambda_handler(event: dict, context: Any) -> None: # noqa: U100 + """Lambda handler. + + Args: + event (dict): Lambda event object + context (Any): Lambda context object + """ + LOGGER.info("Evaluating compliance for AWS Config rule") + LOGGER.info(f"Event: {json.dumps(event)}") + LOGGER.info(f"Lambda Request ID: {context.aws_request_id}") + + invoking_event = json.loads(event["invokingEvent"]) + rule_parameters = json.loads(event["ruleParameters"]) if "ruleParameters" in event else {} + + compliance_type, annotation = evaluate_compliance(rule_parameters, context.aws_request_id) + + # Ensure annotation doesn't exceed 256 characters + if len(annotation) > 256: + LOGGER.info(f"Original annotation (truncated): {annotation}") + annotation = annotation[:252] + "..." 
+ + evaluation = { + "ComplianceResourceType": "AWS::::Account", + "ComplianceResourceId": event["accountId"], + "ComplianceType": compliance_type, + "Annotation": annotation, + "OrderingTimestamp": invoking_event["notificationCreationTime"], + } + + LOGGER.info(f"Compliance evaluation result: {compliance_type}") + LOGGER.info(f"Annotation: {annotation}") + + config_client.put_evaluations(Evaluations=[evaluation], ResultToken=event["resultToken"]) # type: ignore + + LOGGER.info("Compliance evaluation complete.") diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_s3_bucket/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_s3_bucket/app.py new file mode 100644 index 00000000..17f2a79c --- /dev/null +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_s3_bucket/app.py @@ -0,0 +1,285 @@ +"""Config rule to check knowledge base S3 bucket configuration for Bedrock environments. + +Version: 1.0 + +Config rule for SRA in the repo, https://github.com/aws-samples/aws-security-reference-architecture-examples + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +SPDX-License-Identifier: MIT-0 +""" + +import json +import logging +import os +from typing import Any, Dict + +import boto3 +from botocore.exceptions import ClientError + +# Setup Default Logger +LOGGER = logging.getLogger(__name__) +log_level = os.environ.get("LOG_LEVEL", logging.INFO) +LOGGER.setLevel(log_level) +LOGGER.info(f"boto3 version: {boto3.__version__}") + +# Get AWS region from environment variable +AWS_REGION = os.environ.get("AWS_REGION") + +# Initialize AWS clients +bedrock_agent_client = boto3.client("bedrock-agent", region_name=AWS_REGION) +s3_client = boto3.client("s3", region_name=AWS_REGION) +config_client = boto3.client("config", region_name=AWS_REGION) + + +def check_retention(bucket_name: str) -> bool: + """Check if bucket has retention configuration. 
+ + Args: + bucket_name (str): Name of the S3 bucket to check + + Returns: + bool: True if bucket has retention configuration, False otherwise + """ + try: + lifecycle = s3_client.get_bucket_lifecycle_configuration(Bucket=bucket_name) + return any(rule.get("Expiration") for rule in lifecycle.get("Rules", [])) + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchLifecycleConfiguration": + return False + if e.response["Error"]["Code"] != "NoSuchBucket": + LOGGER.error(f"Error checking retention for bucket {bucket_name}: {str(e)}") + return False + + +def check_encryption(bucket_name: str) -> bool: + """Check if bucket has encryption configuration. + + Args: + bucket_name (str): Name of the S3 bucket to check + + Returns: + bool: True if bucket has encryption configuration, False otherwise + """ + try: + encryption = s3_client.get_bucket_encryption(Bucket=bucket_name) + return bool(encryption.get("ServerSideEncryptionConfiguration")) + except ClientError as e: + if e.response["Error"]["Code"] != "NoSuchBucket": + return False + return False + + +def check_access_logging(bucket_name: str) -> bool: + """Check if bucket has access logging enabled. + + Args: + bucket_name (str): Name of the S3 bucket to check + + Returns: + bool: True if bucket has access logging enabled, False otherwise + """ + try: + logging_config = s3_client.get_bucket_logging(Bucket=bucket_name) + return bool(logging_config.get("LoggingEnabled")) + except ClientError as e: + if e.response["Error"]["Code"] != "NoSuchBucket": + return False + return False + + +def check_object_locking(bucket_name: str) -> bool: + """Check if bucket has object locking enabled. 
+ + Args: + bucket_name (str): Name of the S3 bucket to check + + Returns: + bool: True if bucket has object locking enabled, False otherwise + """ + try: + lock_config = s3_client.get_object_lock_configuration(Bucket=bucket_name) + return bool(lock_config.get("ObjectLockConfiguration")) + except ClientError as e: + if e.response["Error"]["Code"] != "NoSuchBucket": + return False + return False + + +def check_versioning(bucket_name: str) -> bool: + """Check if bucket has versioning enabled. + + Args: + bucket_name (str): Name of the S3 bucket to check + + Returns: + bool: True if bucket has versioning enabled, False otherwise + """ + try: + versioning = s3_client.get_bucket_versioning(Bucket=bucket_name) + return versioning.get("Status") == "Enabled" + except ClientError as e: + if e.response["Error"]["Code"] != "NoSuchBucket": + return False + return False + + +def check_bucket_configuration(bucket_name: str, rule_parameters: dict) -> list[str]: + """Check S3 bucket configuration against required settings. 
+ + Args: + bucket_name (str): Name of the S3 bucket + rule_parameters (dict): Rule parameters containing check flags + + Returns: + list[str]: List of missing configurations + """ + issues = [] + + if rule_parameters.get("check_retention", "true").lower() == "true" and not check_retention(bucket_name): + issues.append("retention") + if rule_parameters.get("check_encryption", "true").lower() == "true" and not check_encryption(bucket_name): + issues.append("encryption") + if rule_parameters.get("check_access_logging", "true").lower() == "true" and not check_access_logging(bucket_name): + issues.append("access logging") + if rule_parameters.get("check_object_locking", "true").lower() == "true" and not check_object_locking(bucket_name): + issues.append("object locking") + if rule_parameters.get("check_versioning", "true").lower() == "true" and not check_versioning(bucket_name): + issues.append("versioning") + + return issues + + +def get_bucket_name_from_data_source(data_source: Dict[str, Any]) -> str | None: # type: ignore + """Extract bucket name from data source configuration. + + Args: + data_source (Dict[str, Any]): Data source configuration + + Returns: + str | None: Bucket name if found, None otherwise + """ + try: + if ( + "dataSource" in data_source + and "dataSourceConfiguration" in data_source["dataSource"] + and "s3Configuration" in data_source["dataSource"]["dataSourceConfiguration"] + ): + s3_config = data_source["dataSource"]["dataSourceConfiguration"]["s3Configuration"] + bucket_arn = s3_config.get("bucketArn", "") + + if not bucket_arn: + return None + + bucket_name = bucket_arn.split(":")[-1] + return bucket_name.split("/")[0] if "/" in bucket_name else bucket_name + except Exception as e: + LOGGER.error(f"Error processing data source: {str(e)}") + return None + + +def check_knowledge_base(kb_id: str, rule_parameters: dict) -> list[str]: + """Check a knowledge base's data sources for S3 bucket compliance. 
+ + Args: + kb_id (str): Knowledge base ID + rule_parameters (dict): Rule parameters containing check flags + + Returns: + list[str]: List of non-compliant bucket messages + """ + non_compliant_buckets = [] + data_sources_paginator = bedrock_agent_client.get_paginator("list_data_sources") + + for ds_page in data_sources_paginator.paginate(knowledgeBaseId=kb_id): + for ds in ds_page.get("dataSourceSummaries", []): + data_source = bedrock_agent_client.get_data_source(knowledgeBaseId=kb_id, dataSourceId=ds["dataSourceId"]) + + bucket_name = get_bucket_name_from_data_source(data_source) # type: ignore + if not bucket_name: + continue + + issues = check_bucket_configuration(bucket_name, rule_parameters) + if issues: + non_compliant_buckets.append(f"{bucket_name} (missing: {', '.join(issues)})") + + return non_compliant_buckets + + +def evaluate_compliance(rule_parameters: dict) -> tuple[str, str]: + """Evaluate if Bedrock Knowledge Base S3 bucket has required configurations. + + Args: + rule_parameters (dict): Rule parameters from AWS Config rule. 
+ + Returns: + tuple[str, str]: Compliance status and annotation + """ + try: + non_compliant_buckets = [] + paginator = bedrock_agent_client.get_paginator("list_knowledge_bases") + + for page in paginator.paginate(): + for kb in page["knowledgeBaseSummaries"]: + non_compliant_buckets.extend(check_knowledge_base(kb["knowledgeBaseId"], rule_parameters)) + + if non_compliant_buckets: + # Create a shorter message for each bucket by using abbreviations + bucket_msgs = [] + for bucket in non_compliant_buckets: + # Replace longer descriptions with abbreviations + short_msg = bucket.replace("missing: ", "") + short_msg = short_msg.replace("retention", "ret") + short_msg = short_msg.replace("encryption", "enc") + short_msg = short_msg.replace("access logging", "log") + short_msg = short_msg.replace("object locking", "lock") + short_msg = short_msg.replace("versioning", "ver") + bucket_msgs.append(short_msg) + + # Build the annotation message + annotation = f"Non-compliant KB S3 buckets: {'; '.join(bucket_msgs)}" + + # If annotation exceeds limit, truncate and refer to logs + if len(annotation) > 256: + # Log the full message + LOGGER.info(f"Full compliance details: {annotation}") + # Create a truncated message that fits within the limit + count = len(non_compliant_buckets) + annotation = f"{count} non-compliant KB S3 buckets. See CloudWatch logs for details." + + return "NON_COMPLIANT", annotation + return "COMPLIANT", "All KB S3 buckets compliant" + + except Exception as e: + LOGGER.error(f"Error evaluating Knowledge Base S3 bucket configurations: {str(e)}") + # INSUFFICIENT_DATA is the valid ComplianceType for evaluation errors ("ERROR" is rejected by PutEvaluations) + return "INSUFFICIENT_DATA", f"Error: {str(e)[:240]}" + + +def lambda_handler(event: dict, context: Any) -> None: # noqa: U100 + """Lambda handler.
+ + Args: + event (dict): Lambda event object + context (Any): Lambda context object + """ + LOGGER.info("Evaluating compliance for AWS Config rule") + LOGGER.info(f"Event: {json.dumps(event)}") + + invoking_event = json.loads(event["invokingEvent"]) + rule_parameters = json.loads(event["ruleParameters"]) if "ruleParameters" in event else {} + + compliance_type, annotation = evaluate_compliance(rule_parameters) + + evaluation = { + "ComplianceResourceType": "AWS::::Account", + "ComplianceResourceId": event["accountId"], + "ComplianceType": compliance_type, + "Annotation": annotation, + "OrderingTimestamp": invoking_event["notificationCreationTime"], + } + + LOGGER.info(f"Compliance evaluation result: {compliance_type}") + LOGGER.info(f"Annotation: {annotation}") + + config_client.put_evaluations(Evaluations=[evaluation], ResultToken=event["resultToken"]) # type: ignore + + LOGGER.info("Compliance evaluation complete.") diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_vector_store_secret/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_vector_store_secret/app.py new file mode 100644 index 00000000..6c8ef440 --- /dev/null +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_kb_vector_store_secret/app.py @@ -0,0 +1,221 @@ +"""Config rule to check knowledge base vector store secret configuration for Bedrock environments. + +Version: 1.0 + +Config rule for SRA in the repo, https://github.com/aws-samples/aws-security-reference-architecture-examples + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+SPDX-License-Identifier: MIT-0 +""" + +import json +import logging +import os +from typing import Any, List, Optional + +import boto3 +from botocore.exceptions import ClientError + +# Setup Default Logger +LOGGER = logging.getLogger(__name__) +log_level = os.environ.get("LOG_LEVEL", logging.INFO) +LOGGER.setLevel(log_level) +LOGGER.info(f"boto3 version: {boto3.__version__}") + +# Get AWS region from environment variable +AWS_REGION = os.environ.get("AWS_REGION") + +# Initialize AWS clients +bedrock_agent_client = boto3.client("bedrock-agent", region_name=AWS_REGION) +secretsmanager_client = boto3.client("secretsmanager", region_name=AWS_REGION) +config_client = boto3.client("config", region_name=AWS_REGION) + +# Maximum annotation length for AWS Config PutEvaluations API +MAX_ANNOTATION_LENGTH = 256 + + +def check_knowledge_base(kb_id: str, kb_name: str) -> tuple[bool, str]: # noqa: CFQ004 + """Check if a knowledge base's vector store is using AWS Secrets Manager for credentials. + + Args: + kb_id (str): Knowledge base ID + kb_name (str): Knowledge base name + + Raises: + ClientError: If there is an error accessing the knowledge base or secret. 
+ + Returns: + tuple[bool, str]: (is_compliant, message) + """ + try: + kb_details = bedrock_agent_client.get_knowledge_base(knowledgeBaseId=kb_id) + LOGGER.info(f"KB Details: {json.dumps(kb_details, default=str)}") + + # Get the knowledge base object from the response + kb = kb_details.get("knowledgeBase", {}) + storage_config = kb.get("storageConfiguration") + LOGGER.info(f"Storage config from kb: {json.dumps(storage_config, default=str)}") + + if not storage_config or not isinstance(storage_config, dict): + return False, f"{kb_name} (No vector config)" + + storage_type = storage_config.get("type") + LOGGER.info(f"Storage type: {storage_type}") + if not storage_type: + return False, f"{kb_name} (No store type)" + + # Check if storage type is one of the supported types + supported_types = { + "PINECONE": "pineconeConfiguration", + "MONGO_DB_ATLAS": "mongoDbAtlasConfiguration", + "REDIS_ENTERPRISE_CLOUD": "redisEnterpriseCloudConfiguration", + "RDS": "rdsConfiguration", + } + + # If storage type is not supported, it's compliant (no credentials needed) + if storage_type not in supported_types: + LOGGER.info(f"Storage type {storage_type} not supported - no credentials needed") + return True, f"{kb_name} ({storage_type} - no creds needed)" + + # Get the configuration block for the storage type + config_key = supported_types[storage_type] + LOGGER.info(f"Config key: {config_key}") + type_config = storage_config.get(config_key) + LOGGER.info(f"Type config: {type_config}") + + if not type_config or not isinstance(type_config, dict): + return False, f"{kb_name} (Missing {storage_type} config)" + + # Check for credentials secret ARN + secret_arn = type_config.get("credentialsSecretArn") + LOGGER.info(f"Secret ARN: {secret_arn}") + if not secret_arn: + return False, f"{kb_name} (Missing secret)" + + try: + # Verify the secret exists and is using KMS encryption + secret_details = secretsmanager_client.describe_secret(SecretId=secret_arn) + LOGGER.info(f"Secret details: 
{secret_details}") + if not secret_details.get("KmsKeyId"): + return False, f"{kb_name} (Secret not using CMK)" + return True, f"{kb_name} (Uses CMK)" + except ClientError as e: + if e.response["Error"]["Code"] == "AccessDeniedException": + return False, f"{kb_name} (Secret access denied)" + raise + except ClientError as e: + if e.response["Error"]["Code"] == "AccessDeniedException": + return False, f"{kb_name} (KB access denied)" + raise + + +def format_annotation(compliance_type: str, compliant_kbs: List[str], non_compliant_kbs: Optional[List[str]] = None) -> str: + """Format annotation message and ensure it doesn't exceed 256 characters. + + Args: + compliance_type (str): Compliance status + compliant_kbs (List[str]): List of compliant knowledge bases + non_compliant_kbs (Optional[List[str]]): List of non-compliant knowledge bases + + Returns: + str: Formatted annotation message + """ + if compliance_type == "ERROR": + return compliant_kbs[0] # In this case, compliant_kbs contains the error message + + non_compliant_count = len(non_compliant_kbs) if non_compliant_kbs else 0 + message = "" + + # Start with a brief message + if compliance_type == "NON_COMPLIANT": + base_message = "KB vector store check: " + if non_compliant_kbs: + combined = "; ".join(non_compliant_kbs) + # If message would be too long, provide a count instead of details + if len(base_message + combined) > MAX_ANNOTATION_LENGTH: + message = f"{base_message}{non_compliant_count} non-compliant KBs. See logs for details." + else: + message = base_message + combined + else: # COMPLIANT + if len(compliant_kbs) > 3: + message = f"All {len(compliant_kbs)} KBs comply with vector store requirements." + elif len("; ".join(compliant_kbs)) > MAX_ANNOTATION_LENGTH: + message = f"{len(compliant_kbs)} KBs comply with vector store requirements." 
+ else: + message = "; ".join(compliant_kbs) + + # Final check to ensure we don't exceed limit + if len(message) > MAX_ANNOTATION_LENGTH: + return f"See CloudWatch logs for details. Found {non_compliant_count} issues." + + return message + + +def evaluate_compliance(rule_parameters: dict) -> tuple[str, str]: # noqa: U100 + """Evaluate if Bedrock Knowledge Base vector stores are using KMS encrypted secrets. + + Args: + rule_parameters (dict): Rule parameters from AWS Config rule. + + Returns: + tuple[str, str]: Compliance type and annotation message. + """ + try: + non_compliant_kbs = [] + compliant_kbs = [] + paginator = bedrock_agent_client.get_paginator("list_knowledge_bases") + + for page in paginator.paginate(): + for kb in page["knowledgeBaseSummaries"]: + kb_id = kb["knowledgeBaseId"] + LOGGER.info(f"KB ID: {kb_id}") + kb_name = kb.get("name", kb_id) + LOGGER.info(f"KB Name: {kb_name}") + is_compliant, message = check_knowledge_base(kb_id, kb_name) + if is_compliant: + compliant_kbs.append(message) + else: + non_compliant_kbs.append(message) + + # Log full details for CloudWatch reference + LOGGER.info(f"Compliant KBs: {'; '.join(compliant_kbs)}") + if non_compliant_kbs: + LOGGER.info(f"Non-compliant KBs: {'; '.join(non_compliant_kbs)}") + return "NON_COMPLIANT", format_annotation("NON_COMPLIANT", compliant_kbs, non_compliant_kbs) + return "COMPLIANT", format_annotation("COMPLIANT", compliant_kbs) + + except Exception as e: + error_msg = f"Error evaluating compliance: {str(e)}" + LOGGER.error(error_msg) + # INSUFFICIENT_DATA is the valid ComplianceType for evaluation errors ("ERROR" is rejected by PutEvaluations); + # truncate so the annotation never exceeds the 256-char limit. + return "INSUFFICIENT_DATA", format_annotation("ERROR", [error_msg[:MAX_ANNOTATION_LENGTH]]) + + +def lambda_handler(event: dict, context: Any) -> None: # noqa: U100 + """Lambda handler. 
+ + Args: + event (dict): Lambda event object + context (Any): Lambda context object + """ + LOGGER.info("Evaluating compliance for AWS Config rule") + LOGGER.info(f"Event: {json.dumps(event)}") + + invoking_event = json.loads(event["invokingEvent"]) + rule_parameters = json.loads(event["ruleParameters"]) if "ruleParameters" in event else {} + + compliance_type, annotation = evaluate_compliance(rule_parameters) + + evaluation = { + "ComplianceResourceType": "AWS::::Account", + "ComplianceResourceId": event["accountId"], + "ComplianceType": compliance_type, + "Annotation": annotation, + "OrderingTimestamp": invoking_event["notificationCreationTime"], + } + + LOGGER.info(f"Compliance evaluation result: {compliance_type}") + LOGGER.info(f"Annotation: {annotation}") + + config_client.put_evaluations(Evaluations=[evaluation], ResultToken=event["resultToken"]) # type: ignore + + LOGGER.info("Compliance evaluation complete.") diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_s3_endpoints/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_s3_endpoints/app.py index 7eb7df0f..9d296510 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_s3_endpoints/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_s3_endpoints/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_vpc_endpoints/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_vpc_endpoints/app.py index 234ba9dd..aca1c0dd 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_vpc_endpoints/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/rules/sra_bedrock_check_vpc_endpoints/app.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/app.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/app.py index 17d2edc1..f0f0e893 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/app.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/app.py @@ -8,6 +8,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: MIT-0 """ + import copy import json import logging @@ -200,6 +201,18 @@ def load_sra_cloudwatch_dashboard() -> dict: + r'\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"filter_params"\s*:\s*\{"log_group_name"\s*:\s*"[^"\s]+",\s*"input_path"\s*:\s*"[^"\s]+"\}\}$', "SRA-BEDROCK-CENTRAL-OBSERVABILITY": r'^\{"deploy"\s*:\s*"(true|false)",\s*"bedrock_accounts"\s*:\s*' + r'\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\]\}$', + "SRA-BEDROCK-CHECK-KB-LOGGING": r'^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*' + + r'\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$', + "SRA-BEDROCK-CHECK-KB-INGESTION-ENCRYPTION": r'^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*' + + r'"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$', + "SRA-BEDROCK-CHECK-KB-S3-BUCKET": r'^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*' + + r'\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*\{(\s*"check_retention"\s*:\s*"(true|false)")?(\s*,\s*"check_encryption"\s*:\s*' + + r'"(true|false)")?(\s*,\s*"check_access_logging"\s*:\s*"(true|false)")?(\s*,\s*"check_object_locking"\s*:\s*"(true|false)")?(\s*,\s*' + + r'"check_versioning"\s*:\s*"(true|false)")?\s*\}\}$', + "SRA-BEDROCK-CHECK-KB-VECTOR-STORE-SECRET": r'^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*' + + r'"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$', + "SRA-BEDROCK-CHECK-KB-OPENSEARCH-ENCRYPTION": r'^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*' + + r'"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$', } # Instantiate sra class objects @@ -806,6 +819,9 @@ def deploy_config_rules(region: str, accounts: list, 
resource_properties: dict) if rule_deploy is False: LOGGER.info(f"{rule_name} is not to be deployed. Checking to see if it needs to be removed...") + if acct not in rule_accounts: + LOGGER.info(f"{rule_name} does not apply to {acct}; skipping attempt to delete...") + continue delete_custom_config_rule(rule_name, acct, region) delete_custom_config_iam_role(rule_name, acct) continue @@ -1439,13 +1455,17 @@ def create_event(event: dict, context: Any) -> str: create_sns_messages(accounts, regions, topic_arn, event["ResourceProperties"], "configure") LOGGER.info(f"CFN_RESPONSE_DATA POST create_sns_messages: {CFN_RESPONSE_DATA}") - # 5) Central CloudWatch Observability (regional) - deploy_central_cloudwatch_observability(event) - LOGGER.info(f"CFN_RESPONSE_DATA POST deploy_central_cloudwatch_observability: {CFN_RESPONSE_DATA}") - - # 6) Cloudwatch dashboard in security account (home region, security account) - deploy_cloudwatch_dashboard(event) - LOGGER.info(f"CFN_RESPONSE_DATA POST deploy_cloudwatch_dashboard: {CFN_RESPONSE_DATA}") + central_observability_params = json.loads(event["ResourceProperties"]["SRA-BEDROCK-CENTRAL-OBSERVABILITY"]) + if central_observability_params["deploy"] == "true": + # 5) Central CloudWatch Observability (regional) + deploy_central_cloudwatch_observability(event) + LOGGER.info(f"CFN_RESPONSE_DATA POST deploy_central_cloudwatch_observability: {CFN_RESPONSE_DATA}") + + # 6) Cloudwatch dashboard in security account (home region, security account) + deploy_cloudwatch_dashboard(event) + LOGGER.info(f"CFN_RESPONSE_DATA POST deploy_cloudwatch_dashboard: {CFN_RESPONSE_DATA}") + else: + LOGGER.info("CloudWatch observability deploy set to false, skipping deployment...") # End if DRY_RUN is False: @@ -1864,8 +1884,8 @@ def delete_event(event: dict, context: Any) -> None: # noqa: CFQ001, CCR001, C9 for region in regions: delete_custom_config_rule(rule_name, acct, region) - # 5, 6, & 7) Detach IAM policies, delete IAM policy, delete IAM execution role 
for custom config rule lambda - delete_custom_config_iam_role(rule_name, acct) + # 5, 6, & 7) Detach IAM policies, delete IAM policy, delete IAM execution role for custom config rule lambda + delete_custom_config_iam_role(rule_name, acct) # Must infer the execution role arn because the function is being reported as non-existent at this point execution_role_arn = f"arn:aws:iam::{sts.MANAGEMENT_ACCOUNT}:role/{SOLUTION_NAME}-lambda" LOGGER.info(f"Removing state table record for lambda IAM execution role: {execution_role_arn}") diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/cfnresponse.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/cfnresponse.py index 60d40e54..9173c3c3 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/cfnresponse.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/cfnresponse.py @@ -1,4 +1,5 @@ """Amazon CFNResponse Module.""" + # mypy: ignore-errors # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: MIT-0 diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_config_lambda_iam_permissions.json b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_config_lambda_iam_permissions.json index de11d6ac..f055d5b6 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_config_lambda_iam_permissions.json +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_config_lambda_iam_permissions.json @@ -123,5 +123,129 @@ "Resource": "*" } ] + }, + "sra-bedrock-check-kb-logging": { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowKnowledgeBaseAccess", + "Effect": "Allow", + "Action": [ + "bedrock:ListKnowledgeBases", + "bedrock:GetKnowledgeBase" + ], + "Resource": "*" + }, + { + "Sid": "AllowDescribeDeliverySourcesAndDestinations", + "Effect": "Allow", + "Action": [ + "logs:DescribeDeliverySources", + "logs:DescribeDeliveryDestinations", + "logs:DescribeDeliveries", + "logs:GetDelivery" + ], + "Resource": "*" + } + ] + }, + "sra-bedrock-check-kb-ingestion-encryption": { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowKnowledgeBaseAccess", + "Effect": "Allow", + "Action": [ + "bedrock:ListKnowledgeBases", + "bedrock:GetKnowledgeBase", + "bedrock:ListDataSources", + "bedrock:GetDataSource" + ], + "Resource": "*" + } + ] + }, + "sra-bedrock-check-kb-s3-bucket": { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowKnowledgeBaseAccess", + "Effect": "Allow", + "Action": [ + "bedrock:ListKnowledgeBases", + "bedrock:GetKnowledgeBase", + "bedrock:ListDataSources", + "bedrock:GetDataSource" + ], + "Resource": "*" + }, + { + "Sid": "AllowS3BucketAccess", + "Effect": "Allow", + "Action": [ + "s3:GetBucketObjectLockConfiguration", + "s3:GetLifecycleConfiguration", + "s3:GetEncryptionConfiguration", + "s3:GetBucketLogging", + "s3:GetBucketVersioning" + ], + "Resource": "arn:aws:s3:::*" + } + ] + }, + "sra-bedrock-check-kb-vector-store-secret": { + "Version": "2012-10-17", 
+ "Statement": [ + { + "Sid": "AllowKnowledgeBaseAccess", + "Effect": "Allow", + "Action": [ + "bedrock:ListKnowledgeBases", + "bedrock:GetKnowledgeBase" + ], + "Resource": "*" + }, + { + "Sid": "AllowSecretsManagerAccess", + "Effect": "Allow", + "Action": [ + "secretsmanager:DescribeSecret" + ], + "Resource": "*" + } + ] + }, + "sra-bedrock-check-kb-opensearch-encryption": { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowKnowledgeBaseAccess", + "Effect": "Allow", + "Action": [ + "bedrock:ListKnowledgeBases", + "bedrock:GetKnowledgeBase" + ], + "Resource": "*" + }, + { + "Sid": "AllowOpenSearchAccess", + "Effect": "Allow", + "Action": [ + "es:DescribeDomain" + ], + "Resource": "*" + }, + { + "Sid": "AllowOpenSearchServerlessAccess", + "Effect": "Allow", + "Action": [ + "aoss:GetSecurityPolicy", + "aoss:ListSecurityPolicies", + "aoss:BatchGetCollection", + "aoss:BatchGetCollection" + ], + "Resource": "*" + } + ] } } diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_iam.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_iam.py index e58d2349..97f9a385 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_iam.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_iam.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: MIT-0 """ + +from __future__ import annotations import json @@ -113,6 +114,8 @@ def create_role(self, role_name: str, trust_policy: dict, solution_name: str) -> except ClientError as error: if error.response["Error"]["Code"] == "EntityAlreadyExists": self.LOGGER.info(f"{role_name} role already exists!") + response = self.IAM_CLIENT.get_role(RoleName=role_name) + return {"Role": {"Arn": response["Role"]["Arn"]}} return {"Role": {"Arn": "error"}} def create_policy(self, policy_name: str, policy_document: dict, solution_name: str) -> dict: @@ -158,18 +161,32 @@ def detach_policy(self, role_name: str, policy_arn: str) -> EmptyResponseMetadat role_name: Name of the role for which the policy is removed from policy_arn: The Amazon Resource Name (ARN) of the policy to be detached + Raises: + ValueError: If an unexpected error occurs during the operation. + Returns: Empty response metadata """ self.LOGGER.info("Detaching policy from %s.", role_name) - return self.IAM_CLIENT.detach_role_policy(RoleName=role_name, PolicyArn=policy_arn) + try: + response = self.IAM_CLIENT.detach_role_policy(RoleName=role_name, PolicyArn=policy_arn) + except ClientError as error: + if error.response["Error"]["Code"] != "NoSuchEntity": + self.LOGGER.error(f"Error detaching policy '{policy_arn}' from role '{role_name}': {error}") + raise ValueError(f"Error detaching policy '{policy_arn}' from role '{role_name}': {error}") from None + self.LOGGER.info(f"Policy '{policy_arn}' is not attached to role '{role_name}'.") + response = {"ResponseMetadata": {}} # type: ignore # avoid UnboundLocalError on the graceful no-op path + return response - def delete_policy(self, policy_arn: str) -> EmptyResponseMetadataTypeDef: + def delete_policy(self, policy_arn: str) -> EmptyResponseMetadataTypeDef: # noqa: CCR001 """Delete IAM Policy. Args: policy_arn: The Amazon Resource Name (ARN) of the policy to be deleted + Raises: + ValueError: If an unexpected error occurs during the operation. 
+
         Returns:
             Empty response metadata
         """
@@ -181,10 +198,25 @@ def delete_policy(self, policy_arn: str) -> EmptyResponseMetadataTypeDef:
             for version in page["Versions"]:
                 if not version["IsDefaultVersion"]:
                     self.LOGGER.info(f"Deleting policy version {version['VersionId']}")
-                    self.IAM_CLIENT.delete_policy_version(PolicyArn=policy_arn, VersionId=version["VersionId"])
-                    sleep(1)
-                    self.LOGGER.info("Policy version deleted.")
-        return self.IAM_CLIENT.delete_policy(PolicyArn=policy_arn)
+                    try:
+                        self.IAM_CLIENT.delete_policy_version(PolicyArn=policy_arn, VersionId=version["VersionId"])
+                        sleep(1)
+                        self.LOGGER.info("Policy version deleted.")
+                    except ClientError as error:
+                        if error.response["Error"]["Code"] == "NoSuchEntity":
+                            self.LOGGER.info(f"Policy version {version['VersionId']} not found.")
+                        else:
+                            self.LOGGER.error(f"Error deleting policy version {version['VersionId']}: {error}")
+                            raise ValueError(f"Error deleting policy version {version['VersionId']}: {error}") from None
+        try:
+            response = self.IAM_CLIENT.delete_policy(PolicyArn=policy_arn)
+        except ClientError as error:
+            if error.response["Error"]["Code"] != "NoSuchEntity":
+                self.LOGGER.error(f"Error deleting policy {policy_arn}: {error}")
+                raise ValueError(f"Error deleting policy {policy_arn}: {error}") from None
+            self.LOGGER.info(f"Policy {policy_arn} not found.")
+            response = error.response
+        return response

     def delete_role(self, role_name: str) -> EmptyResponseMetadataTypeDef:
         """Delete IAM role.
diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_kms.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_kms.py
index 0322a2bc..d86cfdae 100644
--- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_kms.py
+++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_kms.py
@@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0 """ + from __future__ import annotations import logging @@ -70,7 +71,13 @@ def create_kms_key(self, kms_client: KMSClient, key_policy: str, description: st KeyUsage="ENCRYPT_DECRYPT", CustomerMasterKeySpec="SYMMETRIC_DEFAULT", ) - return key_response["KeyMetadata"]["KeyId"] + key_id = key_response["KeyMetadata"]["KeyId"] + + # Enable key rotation + self.LOGGER.info(f"Enabling key rotation for key: {key_id}") + kms_client.enable_key_rotation(KeyId=key_id) + + return key_id def create_alias(self, kms_client: KMSClient, alias_name: str, target_key_id: str) -> None: """Create KMS alias. diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_lambda.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_lambda.py index eba755e0..3f04e522 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_lambda.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_lambda.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + from __future__ import annotations import logging diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_repo.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_repo.py index 3e308f38..36807b12 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_repo.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_repo.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import logging import os import shutil diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_s3.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_s3.py index 1bb11f38..6a419999 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_s3.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_s3.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + import json import logging import os diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sns.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sns.py index 1cb2f99a..56bf4250 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sns.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sns.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + from __future__ import annotations import json diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_ssm_params.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_ssm_params.py index 5e87ad8f..efd7abf1 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_ssm_params.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_ssm_params.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ + from __future__ import annotations import logging diff --git a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sts.py b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sts.py index f3695902..1db7e439 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sts.py +++ b/aws_sra_examples/solutions/genai/bedrock_org/lambda/src/sra_sts.py @@ -7,6 +7,7 @@ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: MIT-0 """ + import logging import os from typing import Any diff --git a/aws_sra_examples/solutions/genai/bedrock_org/templates/sra-bedrock-org-main.yaml b/aws_sra_examples/solutions/genai/bedrock_org/templates/sra-bedrock-org-main.yaml index 1af2e701..33417d04 100644 --- a/aws_sra_examples/solutions/genai/bedrock_org/templates/sra-bedrock-org-main.yaml +++ b/aws_sra_examples/solutions/genai/bedrock_org/templates/sra-bedrock-org-main.yaml @@ -256,7 +256,6 @@ Parameters: or for titan: {"deploy": "true", "filter_params": {"log_group_name": "model-invocation-log-group", "input_path": "input.inputBodyJson.inputText"}} NOTE: input_path is based on the base model used such as clause or titan; check the invocation log InvokeModel messages for details - pBedrockCentralObservabilityParams: Type: String Default: '{"deploy": "true", "bedrock_accounts": ["444455556666"], "regions": ["us-west-2"]}' @@ -282,6 +281,64 @@ Parameters: ConstraintDescription: > Must be a valid JSON string containing an array of region names. Example: ["us-east-1", "us-west-2"] + pBedrockKBLoggingRuleParams: + Type: String + Default: '{"deploy": "true", "accounts": ["444455556666"], "regions": ["us-west-2"], "input_params": {}}' + Description: Bedrock Knowledge Base Logging Config Rule Parameters + AllowedPattern: ^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$ + ConstraintDescription: + "Must be a valid JSON string containing: 'deploy' (true/false), 'accounts' (array of account numbers), + 'regions' (array of region names), and 'input_params' object/dict (input params must be empty). Arrays can be empty. 
+ Example: {\"deploy\": \"true\", \"accounts\": [\"123456789012\"], \"regions\": [\"us-east-1\"], \"input_params\": {}} or + {\"deploy\": \"false\", \"accounts\": [], \"regions\": [], \"input_params\": {}}" + + pBedrockKBIngestionEncryptionRuleParams: + Type: String + Default: '{"deploy": "true", "accounts": ["444455556666"], "regions": ["us-west-2"], "input_params": {}}' + Description: Bedrock Knowledge Base Data Ingestion Encryption Config Rule Parameters + AllowedPattern: ^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$ + ConstraintDescription: + "Must be a valid JSON string containing: 'deploy' (true/false), 'accounts' (array of account numbers), + 'regions' (array of region names), and 'input_params' object/dict (input params must be empty). Arrays can be empty. + Example: {\"deploy\": \"true\", \"accounts\": [\"123456789012\"], \"regions\": [\"us-east-1\"], \"input_params\": {}} or + {\"deploy\": \"false\", \"accounts\": [], \"regions\": [], \"input_params\": {}}" + + pBedrockKBS3BucketRuleParams: + Type: String + Default: '{"deploy": "true", "accounts": ["444455556666"], "regions": ["us-west-2"], "input_params": {"check_retention": "true", "check_encryption": "true", "check_access_logging": "true", "check_object_locking": "true", "check_versioning": "true"}}' + Description: Bedrock Knowledge Base S3 Bucket Config Rule Parameters + AllowedPattern: ^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*\{(\s*"check_retention"\s*:\s*"(true|false)")?(\s*,\s*"check_encryption"\s*:\s*"(true|false)")?(\s*,\s*"check_access_logging"\s*:\s*"(true|false)")?(\s*,\s*"check_object_locking"\s*:\s*"(true|false)")?(\s*,\s*"check_versioning"\s*:\s*"(true|false)")?\s*\}\}$ + ConstraintDescription: > + Must be a valid JSON string containing: 
'deploy' (true/false), 'accounts' (array of account numbers), + 'regions' (array of region names), and 'input_params' object with optional parameters: + 'check_retention', 'check_encryption', 'check_access_logging', 'check_object_locking', 'check_versioning'. + Each parameter in 'input_params' should be either "true" or "false". + Arrays can be empty. + Example: {"deploy": "true", "accounts": ["123456789012"], "regions": ["us-east-1"], "input_params": {"check_retention": "true", "check_encryption": "true", "check_access_logging": "true", "check_object_locking": "true", "check_versioning": "true"}} or + {"deploy": "false", "accounts": [], "regions": [], "input_params": {}} + + pBedrockKBVectorStoreSecretRuleParams: + Type: String + Default: '{"deploy": "true", "accounts": ["444455556666"], "regions": ["us-west-2"], "input_params": {}}' + Description: Bedrock Knowledge Base Vector Store Secret Config Rule Parameters + AllowedPattern: ^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$ + ConstraintDescription: + "Must be a valid JSON string containing: 'deploy' (true/false), 'accounts' (array of account numbers), + 'regions' (array of region names), and 'input_params' object/dict (input params must be empty). Arrays can be empty. 
+ Example: {\"deploy\": \"true\", \"accounts\": [\"123456789012\"], \"regions\": [\"us-east-1\"], \"input_params\": {}} or + {\"deploy\": \"false\", \"accounts\": [], \"regions\": [], \"input_params\": {}}" + + pBedrockKBOpenSearchEncryptionRuleParams: + Type: String + Default: '{"deploy": "true", "accounts": ["444455556666"], "regions": ["us-west-2"], "input_params": {}}' + Description: Bedrock Knowledge Base OpenSearch Encryption Config Rule Parameters + AllowedPattern: ^\{"deploy"\s*:\s*"(true|false)",\s*"accounts"\s*:\s*\[((?:"[0-9]+"(?:\s*,\s*)?)*)\],\s*"regions"\s*:\s*\[((?:"[a-z0-9-]+"(?:\s*,\s*)?)*)\],\s*"input_params"\s*:\s*(\{\})\}$ + ConstraintDescription: + "Must be a valid JSON string containing: 'deploy' (true/false), 'accounts' (array of account numbers), + 'regions' (array of region names), and 'input_params' object/dict (input params must be empty). Arrays can be empty. + Example: {\"deploy\": \"true\", \"accounts\": [\"123456789012\"], \"regions\": [\"us-east-1\"], \"input_params\": {}} or + {\"deploy\": \"false\", \"accounts\": [], \"regions\": [], \"input_params\": {}}" + Metadata: AWS::CloudFormation::Interface: ParameterGroups: @@ -322,6 +379,11 @@ Metadata: - pBedrockCWEndpointsRuleParams - pBedrockS3EndpointsRuleParams - pBedrockGuardrailEncryptionRuleParams + - pBedrockKBLoggingRuleParams + - pBedrockKBIngestionEncryptionRuleParams + - pBedrockKBS3BucketRuleParams + - pBedrockKBVectorStoreSecretRuleParams + - pBedrockKBOpenSearchEncryptionRuleParams - Label: default: Bedrock CloudWatch Metric Filters Parameters: @@ -389,6 +451,16 @@ Metadata: default: Bedrock Accounts pBedrockRegions: default: Bedrock Regions + pBedrockKBLoggingRuleParams: + default: Bedrock Knowledge Base Logging Config Rule Parameters + pBedrockKBIngestionEncryptionRuleParams: + default: Bedrock Knowledge Base Data Ingestion Encryption Config Rule Parameters + pBedrockKBS3BucketRuleParams: + default: Bedrock Knowledge Base S3 Bucket Config Rule Parameters + 
pBedrockKBVectorStoreSecretRuleParams: + default: Bedrock Knowledge Base Vector Store Secret Config Rule Parameters + pBedrockKBOpenSearchEncryptionRuleParams: + default: Bedrock Knowledge Base OpenSearch Encryption Config Rule Parameters Resources: rBedrockOrgLambdaRole: @@ -669,6 +741,11 @@ Resources: SRA-BEDROCK-FILTER-PROMPT-INJECTION: !Ref pBedrockPromptInjectionFilterParams SRA-BEDROCK-FILTER-SENSITIVE-INFO: !Ref pBedrockSensitiveInfoFilterParams SRA-BEDROCK-CENTRAL-OBSERVABILITY: !Ref pBedrockCentralObservabilityParams + SRA-BEDROCK-CHECK-KB-LOGGING: !Ref pBedrockKBLoggingRuleParams + SRA-BEDROCK-CHECK-KB-INGESTION-ENCRYPTION: !Ref pBedrockKBIngestionEncryptionRuleParams + SRA-BEDROCK-CHECK-KB-S3-BUCKET: !Ref pBedrockKBS3BucketRuleParams + SRA-BEDROCK-CHECK-KB-VECTOR-STORE-SECRET: !Ref pBedrockKBVectorStoreSecretRuleParams + SRA-BEDROCK-CHECK-KB-OPENSEARCH-ENCRYPTION: !Ref pBedrockKBOpenSearchEncryptionRuleParams rBedrockOrgLambdaInvokePermission: Type: AWS::Lambda::Permission diff --git a/pyproject.toml b/pyproject.toml index f8e9d4d0..ff7f5162 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -123,7 +123,16 @@ reportGeneralTypeIssues = "none" reportTypedDictNotRequiredAccess = "none" [tool.pylic] -safe_licenses = ["MIT License", "BSD License", "Apache Software License"] +safe_licenses = [ + "MIT", + "BSD-2-Clause", + "Apache-2.0", + "MIT License", + "BSD License", + "Apache Software License", + "Python Software Foundation License", + "ISC License (ISCL)" +] [tool.vulture] ignore_decorators = ["@helper.*"]