Skip to content

Commit 6929ad5

Browse files
committed
Add document understanding examples for text generation models
1 parent 4d2f67c commit 6929ad5

File tree

8 files changed

+415
-7
lines changed

8 files changed

+415
-7
lines changed

.doc_gen/metadata/bedrock-runtime_metadata.yaml

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1522,6 +1522,109 @@ bedrock-runtime_InvokeModelWithResponseStream_TitanTextEmbeddings:
15221522
services:
15231523
bedrock-runtime: {InvokeModel}
15241524

1525+
# Document understanding
1526+
bedrock-runtime_DocumentUnderstanding_AmazonNova:
1527+
title: Send and process a document with Amazon Nova on &BR;
1528+
title_abbrev: "Document understanding"
1529+
synopsis: send and process a document with Amazon Nova on &BR;
1530+
category: Amazon Nova
1531+
languages:
1532+
Python:
1533+
versions:
1534+
- sdk_version: 3
1535+
github: python/example_code/bedrock-runtime
1536+
excerpts:
1537+
- description: Send and process a document with Amazon Nova on &BR;
1538+
snippet_tags:
1539+
- python.example_code.bedrock-runtime.DocumentUnderstanding_AmazonNovaText
1540+
services:
1541+
bedrock-runtime: {Converse}
1542+
1543+
bedrock-runtime_DocumentUnderstanding_AnthropicClaude:
1544+
title: Send and process a document with Anthropic Claude on &BR;
1545+
title_abbrev: "Document understanding"
1546+
synopsis: send and process a document with Anthropic Claude on &BR;
1547+
category: Anthropic Claude
1548+
languages:
1549+
Python:
1550+
versions:
1551+
- sdk_version: 3
1552+
github: python/example_code/bedrock-runtime
1553+
excerpts:
1554+
- description: Send and process a document with Anthropic Claude on &BR;
1555+
snippet_tags:
1556+
- python.example_code.bedrock-runtime.DocumentUnderstanding_AnthropicClaude
1557+
services:
1558+
bedrock-runtime: {Converse}
1559+
1560+
bedrock-runtime_DocumentUnderstanding_CohereCommand:
1561+
title: Send and process a document with Cohere Command models on &BR;
1562+
title_abbrev: "Document understanding"
1563+
synopsis: send and process a document with Cohere Command models on &BR;
1564+
category: Cohere Command
1565+
languages:
1566+
Python:
1567+
versions:
1568+
- sdk_version: 3
1569+
github: python/example_code/bedrock-runtime
1570+
excerpts:
1571+
- description: Send and process a document with Cohere Command models on &BR;
1572+
snippet_tags:
1573+
- python.example_code.bedrock-runtime.DocumentUnderstanding_CohereCommand
1574+
services:
1575+
bedrock-runtime: {Converse}
1576+
1577+
bedrock-runtime_DocumentUnderstanding_DeepSeek:
1578+
title: Send and process a document with DeepSeek on &BR;
1579+
title_abbrev: "Document understanding"
1580+
synopsis: send and process a document with DeepSeek on &BR;
1581+
category: DeepSeek
1582+
languages:
1583+
Python:
1584+
versions:
1585+
- sdk_version: 3
1586+
github: python/example_code/bedrock-runtime
1587+
excerpts:
1588+
- description: Send and process a document with DeepSeek on &BR;
1589+
snippet_tags:
1590+
- python.example_code.bedrock-runtime.DocumentUnderstanding_DeepSeek
1591+
services:
1592+
bedrock-runtime: {Converse}
1593+
1594+
bedrock-runtime_DocumentUnderstanding_MetaLlama:
1595+
title: Send and process a document with Llama on &BR;
1596+
title_abbrev: "Document understanding"
1597+
synopsis: send and process a document with Llama on &BR;
1598+
category: Meta Llama
1599+
languages:
1600+
Python:
1601+
versions:
1602+
- sdk_version: 3
1603+
github: python/example_code/bedrock-runtime
1604+
excerpts:
1605+
- description: Send and process a document with Llama on &BR;
1606+
snippet_tags:
1607+
- python.example_code.bedrock-runtime.DocumentUnderstanding_MetaLlama
1608+
services:
1609+
bedrock-runtime: {Converse}
1610+
1611+
bedrock-runtime_DocumentUnderstanding_Mistral:
1612+
title: Send and process a document with Mistral models on &BR;
1613+
title_abbrev: "Document understanding"
1614+
synopsis: send and process a document with Mistral models on &BR;
1615+
category: Mistral AI
1616+
languages:
1617+
Python:
1618+
versions:
1619+
- sdk_version: 3
1620+
github: python/example_code/bedrock-runtime
1621+
excerpts:
1622+
- description: Send and process a document with Mistral models on &BR;
1623+
snippet_tags:
1624+
- python.example_code.bedrock-runtime.DocumentUnderstanding_Mistral
1625+
services:
1626+
bedrock-runtime: {Converse}
1627+
15251628
# Tool use scenarios
15261629
bedrock-runtime_Scenario_ToolUseDemo_AmazonNova:
15271630
title: "A tool use demo illustrating how to connect AI models on &BR; with a custom tool or API"

python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/document_understanding.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,15 @@
22
# SPDX-License-Identifier: Apache-2.0
33

44
# snippet-start:[python.example_code.bedrock-runtime.DocumentUnderstanding_AmazonNovaText]
5-
# Use the Conversation API to send a text message to Amazon Nova.
5+
# Send and process a document with Amazon Nova on Amazon Bedrock.
66

77
import boto3
88
from botocore.exceptions import ClientError
99

1010
# Create a Bedrock Runtime client in the AWS Region you want to use.
1111
client = boto3.client("bedrock-runtime", region_name="us-east-1")
1212

13-
# Set the model ID, e.g., Amazon Nova Lite.
13+
# Set the model ID, e.g. Amazon Nova Lite.
1414
model_id = "amazon.nova-lite-v1:0"
1515

1616
# Load the document
@@ -22,16 +22,15 @@
2222
{
2323
"role": "user",
2424
"content": [
25-
{
26-
"text": "Briefly compare the models described in this document"
27-
},
25+
{"text": "Briefly compare the models described in this document"},
2826
{
2927
"document": {
28+
# Available formats: html, md, pdf, doc/docx, xls/xlsx, csv, and txt
3029
"format": "pdf",
3130
"name": "Amazon Nova Service Cards",
32-
"source": {"bytes": document_bytes}
31+
"source": {"bytes": document_bytes},
3332
}
34-
}
33+
},
3534
],
3635
}
3736
]
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2+
# SPDX-License-Identifier: Apache-2.0
3+
4+
# snippet-start:[python.example_code.bedrock-runtime.DocumentUnderstanding_AnthropicClaude]
5+
# Send and process a document with Anthropic Claude on Amazon Bedrock.
6+
7+
import boto3
8+
from botocore.exceptions import ClientError
9+
10+
# Create a Bedrock Runtime client in the AWS Region you want to use.
11+
client = boto3.client("bedrock-runtime", region_name="us-east-1")
12+
13+
# Set the model ID, e.g. Claude 3 Haiku.
14+
model_id = "anthropic.claude-3-haiku-20240307-v1:0"
15+
16+
# Load the document
17+
with open("example-data/amazon-nova-service-cards.pdf", "rb") as file:
18+
document_bytes = file.read()
19+
20+
# Start a conversation with a user message and the document
21+
conversation = [
22+
{
23+
"role": "user",
24+
"content": [
25+
{"text": "Briefly compare the models described in this document"},
26+
{
27+
"document": {
28+
# Available formats: html, md, pdf, doc/docx, xls/xlsx, csv, and txt
29+
"format": "pdf",
30+
"name": "Amazon Nova Service Cards",
31+
"source": {"bytes": document_bytes},
32+
}
33+
},
34+
],
35+
}
36+
]
37+
38+
try:
39+
# Send the message to the model, using a basic inference configuration.
40+
response = client.converse(
41+
modelId=model_id,
42+
messages=conversation,
43+
inferenceConfig={"maxTokens": 500, "temperature": 0.3},
44+
)
45+
46+
# Extract and print the response text.
47+
response_text = response["output"]["message"]["content"][0]["text"]
48+
print(response_text)
49+
50+
except (ClientError, Exception) as e:
51+
print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
52+
exit(1)
53+
54+
# snippet-end:[python.example_code.bedrock-runtime.DocumentUnderstanding_AnthropicClaude]
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# snippet-start:[python.example_code.bedrock-runtime.DocumentUnderstanding_CohereCommand]
# Send and process a document with Cohere Command models on Amazon Bedrock.

import boto3
from botocore.exceptions import ClientError

# Create a Bedrock Runtime client in the AWS Region you want to use.
client = boto3.client("bedrock-runtime", region_name="us-east-1")

# Set the model ID, e.g. Command R+.
model_id = "cohere.command-r-plus-v1:0"

# Load the document as raw bytes.
with open("example-data/amazon-nova-service-cards.pdf", "rb") as file:
    document_bytes = file.read()

# Start a conversation with a user message and the document
conversation = [
    {
        "role": "user",
        "content": [
            {"text": "Briefly compare the models described in this document"},
            {
                "document": {
                    # Available formats: html, md, pdf, doc/docx, xls/xlsx, csv, and txt
                    "format": "pdf",
                    "name": "Amazon Nova Service Cards",
                    "source": {"bytes": document_bytes},
                }
            },
        ],
    }
]

try:
    # Send the message to the model, using a basic inference configuration.
    response = client.converse(
        modelId=model_id,
        messages=conversation,
        inferenceConfig={"maxTokens": 500, "temperature": 0.3},
    )

    # Extract and print the response text.
    response_text = response["output"]["message"]["content"][0]["text"]
    print(response_text)

# ClientError is a subclass of Exception, so the original tuple
# `(ClientError, Exception)` was redundant; catch the service error
# explicitly first, then fall through to any other unexpected failure.
except ClientError as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)
except Exception as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)

# snippet-end:[python.example_code.bedrock-runtime.DocumentUnderstanding_CohereCommand]
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# snippet-start:[python.example_code.bedrock-runtime.DocumentUnderstanding_DeepSeek]
# Send and process a document with DeepSeek on Amazon Bedrock.

import boto3
from botocore.exceptions import ClientError

# Create a Bedrock Runtime client in the AWS Region you want to use.
client = boto3.client("bedrock-runtime", region_name="us-east-1")

# Set the model ID, e.g. DeepSeek-R1
model_id = "us.deepseek.r1-v1:0"

# Load the document as raw bytes.
with open("example-data/amazon-nova-service-cards.pdf", "rb") as file:
    document_bytes = file.read()

# Start a conversation with a user message and the document
conversation = [
    {
        "role": "user",
        "content": [
            {"text": "Briefly compare the models described in this document"},
            {
                "document": {
                    # Available formats: html, md, pdf, doc/docx, xls/xlsx, csv, and txt
                    "format": "pdf",
                    "name": "Amazon Nova Service Cards",
                    "source": {"bytes": document_bytes},
                }
            },
        ],
    }
]

try:
    # Send the message to the model, using a basic inference configuration.
    # A larger maxTokens budget leaves room for the model's reasoning output.
    response = client.converse(
        modelId=model_id,
        messages=conversation,
        inferenceConfig={"maxTokens": 2000, "temperature": 0.3},
    )

    # Extract and print the reasoning and response text. The reasoning model
    # returns both a reasoningContent block and a plain text block; collect
    # whichever of each appears in the content list.
    reasoning, response_text = "", ""
    for item in response["output"]["message"]["content"]:
        for key, value in item.items():
            if key == "reasoningContent":
                reasoning = value["reasoningText"]["text"]
            elif key == "text":
                response_text = value

    print(f"\nReasoning:\n{reasoning}")
    print(f"\nResponse:\n{response_text}")

# ClientError is a subclass of Exception, so the original tuple
# `(ClientError, Exception)` was redundant; catch the service error
# explicitly first, then fall through to any other unexpected failure.
except ClientError as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)
except Exception as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)

# snippet-end:[python.example_code.bedrock-runtime.DocumentUnderstanding_DeepSeek]
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# snippet-start:[python.example_code.bedrock-runtime.DocumentUnderstanding_MetaLlama]
# Send and process a document with Llama on Amazon Bedrock.

import boto3
from botocore.exceptions import ClientError

# Create a Bedrock Runtime client in the AWS Region you want to use.
client = boto3.client("bedrock-runtime", region_name="us-east-1")

# Set the model ID, e.g. Llama 3.1 8B Instruct.
model_id = "us.meta.llama3-1-8b-instruct-v1:0"

# Load the document as raw bytes.
with open("example-data/amazon-nova-service-cards.pdf", "rb") as file:
    document_bytes = file.read()

# Start a conversation with a user message and the document
conversation = [
    {
        "role": "user",
        "content": [
            {"text": "Briefly compare the models described in this document"},
            {
                "document": {
                    # Available formats: html, md, pdf, doc/docx, xls/xlsx, csv, and txt
                    "format": "pdf",
                    "name": "Amazon Nova Service Cards",
                    "source": {"bytes": document_bytes},
                }
            },
        ],
    }
]

try:
    # Send the message to the model, using a basic inference configuration.
    response = client.converse(
        modelId=model_id,
        messages=conversation,
        inferenceConfig={"maxTokens": 500, "temperature": 0.3},
    )

    # Extract and print the response text.
    response_text = response["output"]["message"]["content"][0]["text"]
    print(response_text)

# ClientError is a subclass of Exception, so the original tuple
# `(ClientError, Exception)` was redundant; catch the service error
# explicitly first, then fall through to any other unexpected failure.
except ClientError as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)
except Exception as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)

# snippet-end:[python.example_code.bedrock-runtime.DocumentUnderstanding_MetaLlama]

0 commit comments

Comments
 (0)