tutorial · intermediate
Parallel tool calls on Claude 3.7 Sonnet Mar 2025 • Tools Enable parallel tool calls on Claude 3.7 Sonnet using batch tool meta-pattern workaround.
cookbook
View original on cookbookClaude 3.7 Sonnet may not make parallel tool calls by default even when enabled. This cookbook demonstrates a workaround using a "batch tool" meta-pattern that wraps multiple tool invocations, encouraging the model to call multiple tools simultaneously in a single response. By introducing a batch_tool that accepts an array of tool invocations, developers can improve latency and efficiency by processing multiple tool calls in parallel rather than sequentially.
Key Points
- •Claude 3.7 Sonnet is less likely to make parallel tool calls, even when disable_parallel_tool_use is not set
- •Introduce a 'batch_tool' meta-tool that wraps multiple tool invocations to encourage parallel execution
- •The batch_tool accepts an 'invocations' array containing tool names and JSON-serialized arguments
- •Processing batch_tool calls requires iterating through invocations and executing each tool sequentially in the handler, but they're submitted to Claude in one response
- •This pattern reduces back-and-forth latency by allowing Claude to request multiple tools in a single turn instead of waiting for results between calls
- •Batch tool implementation requires JSON parsing of arguments since they're passed as serialized strings
- •The workaround maintains compatibility with existing tool definitions while adding a coordination layer
- •Results from batch invocations are joined and returned as a single tool result to Claude
Found this useful? Add it to a playbook for a step-by-step implementation guide.
Workflow Diagram
Start Process
Step A
Step B
Step C
Complete
Concepts
Artifacts (4)
batch_tool_processor (python script)
import json
def process_tool_with_maybe_batch(tool_name, tool_input):
    """Route a tool call, unpacking a batch_tool call into individual calls.

    For a "batch_tool" call, every entry in tool_input["invocations"] is
    dispatched through process_tool_call — each entry's "arguments" field
    arrives as a JSON-serialized string and is decoded here — and the
    results are newline-joined into a single combined tool result.
    Any other tool name is forwarded to process_tool_call unchanged.
    """
    if tool_name != "batch_tool":
        return process_tool_call(tool_name, tool_input)
    outputs = [
        process_tool_call(call["name"], json.loads(call["arguments"]))
        for call in tool_input["invocations"]
    ]
    return "\n".join(outputs)
# --- batch_tool_definition (python config) ---
# Meta-tool that wraps multiple tool invocations in one request, encouraging
# Claude to emit several tool calls in a single response instead of one per turn.
batch_tool = {
    "name": "batch_tool",
    "description": "Invoke multiple other tool calls simultaneously",
    "input_schema": {
        "type": "object",
        "properties": {
            "invocations": {
                "type": "array",
                "description": "The tool calls to invoke",
                "items": {
                    # Bug fix: the JSON Schema keyword is "type"; the original
                    # used "types", which validators silently ignore, leaving
                    # these fields unconstrained.
                    "type": "object",
                    "properties": {
                        "name": {
                            "type": "string",
                            "description": "The name of the tool to invoke"
                        },
                        "arguments": {
                            # JSON-serialized string, decoded by the handler.
                            "type": "string",
                            "description": "The arguments to the tool"
                        }
                    },
                    "required": ["name", "arguments"]
                }
            }
        },
        "required": ["invocations"]
    }
}
# --- tool_definitions (python config) ---
# Example tool definitions used throughout the cookbook.
weather_tool = {
    "name": "get_weather",
    # Bug fix: description read "Gets the weather for in a given location".
    "description": "Gets the weather in a given location",
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA"
            }
        },
        "required": ["location"]
    }
}

time_tool = {
    "name": "get_time",
    "description": "Gets the time in a given location",
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA"
            }
        },
        "required": ["location"]
    }
}
# --- parallel_tools_example (python script) ---
# Client setup for the parallel-tool-calls example.
from anthropic import Anthropic
import json
# Anthropic() reads the API key from the environment (ANTHROPIC_API_KEY).
client = Anthropic()
# NOTE(review): the article targets Claude 3.7 Sonnet, but this pins
# "claude-sonnet-4-6" — confirm which model is intended.
MODEL_NAME = "claude-sonnet-4-6"
def get_weather(location):
    """Stub weather lookup; always reports 72 degrees and sunny."""
    return "The weather in {} is 72 degrees and sunny.".format(location)
def get_time(location):
    """Stub time lookup; always reports 12:32 PM."""
    return "The time in {} is 12:32 PM.".format(location)
def process_tool_call(tool_name, tool_input):
    """Dispatch a single tool invocation to its handler.

    Raises:
        ValueError: if tool_name has no registered handler.
    """
    handlers = {
        "get_weather": lambda args: get_weather(args["location"]),
        "get_time": lambda args: get_time(args["location"]),
    }
    if tool_name in handlers:
        return handlers[tool_name](tool_input)
    raise ValueError(f"Unexpected tool name: {tool_name}")
def process_tool_with_maybe_batch(tool_name, tool_input):
    """Execute one tool call, expanding batch_tool into its wrapped calls.

    A "batch_tool" call carries an "invocations" list whose "arguments"
    fields are JSON-serialized strings; each entry is decoded and run via
    process_tool_call, and the results are newline-joined into one combined
    tool result. Any other tool name is passed straight through.
    """
    if tool_name != "batch_tool":
        return process_tool_call(tool_name, tool_input)
    combined = []
    for invocation in tool_input["invocations"]:
        decoded_args = json.loads(invocation["arguments"])
        combined.append(process_tool_call(invocation["name"], decoded_args))
    return "\n".join(combined)
def make_query_and_print_result(messages, tools=None):
    """Send messages to Claude, print each response block, return the response.

    Text blocks are printed verbatim; tool_use blocks are printed as
    "Tool: name(input)"; any other block type raises ValueError. When
    tools is falsy, the default weather and time tools are offered.
    """
    response = client.messages.create(
        model=MODEL_NAME,
        messages=messages,
        max_tokens=1000,
        tool_choice={"type": "auto"},
        tools=tools or [weather_tool, time_tool]
    )
    for content_block in response.content:
        if content_block.type == "text":
            print(content_block.text)
        elif content_block.type == "tool_use":
            print(f"Tool: {content_block.name}({content_block.input})")
        else:
            raise ValueError(f"Unexpected block type: {content_block.type}")
    return response
# Query with batch tool enabled
# Offering batch_tool alongside the real tools lets Claude wrap the weather
# and time lookups in a single batch_tool call within one response, instead
# of requesting them sequentially across turns.
MESSAGES = [{"role": "user", "content": "What's the weather and time in San Francisco?"}]
response = make_query_and_print_result(MESSAGES, tools=[weather_tool, time_tool, batch_tool])