# Source: reminix.adapters.anthropic.client

"""
Anthropic SDK Client Adapter

Wraps an Anthropic AsyncAnthropic client for use with the Reminix runtime.

Compatibility:
    anthropic >= 0.75.0
"""

from typing import Any, AsyncIterator

from ..protocols import AnthropicClientProtocol
from ...runtime import Agent, Context


def from_anthropic(
    client: AnthropicClientProtocol,
    *,
    name: str,
    model: str = "claude-sonnet-4-20250514",
    system: str | None = None,
    max_tokens: int = 4096,
    metadata: dict[str, Any] | None = None,
) -> Agent:
    """
    Create a Reminix Agent from an Anthropic AsyncAnthropic client.

    Args:
        client: An Anthropic AsyncAnthropic client instance
        name: Name for the Reminix agent
        model: The model to use (default: "claude-sonnet-4-20250514")
        system: Optional system prompt
        max_tokens: Maximum tokens in the response (default: 4096)
        metadata: Optional metadata for the agent

    Returns:
        A Reminix Agent that wraps the Anthropic client

    Example::

        from anthropic import AsyncAnthropic
        from reminix.adapters.anthropic import from_anthropic
        from reminix.runtime import serve

        client = AsyncAnthropic()
        agent = from_anthropic(client, name="claude-agent", model="claude-sonnet-4-20250514")
        serve(agent)
    """
    agent = Agent(
        name=name,
        metadata={
            "framework": "anthropic",
            "adapter": "client",
            "model": model,
            **(metadata or {}),
        },
    )

    def _build_messages(messages: list[dict[str, Any]]) -> list[dict[str, Any]]:
        """Normalize chat messages to the role/content shape the API expects."""
        return [{"role": m["role"], "content": m["content"]} for m in messages]

    def _extract_prompt(input_data: dict[str, Any]) -> Any:
        """Pull the prompt out of invoke input.

        Checks "prompt" then "input" with explicit ``is not None`` tests so an
        explicitly empty prompt ("") is honored instead of falling through to
        the stringified dict (the old ``or`` chain dropped falsy values).
        """
        for key in ("prompt", "input"):
            value = input_data.get(key)
            if value is not None:
                return value
        return str(input_data)

    def _extract_text(response: Any) -> str:
        """Concatenate text from all text-bearing content blocks."""
        return "".join(block.text for block in response.content if hasattr(block, "text"))

    async def _complete(messages: list[dict[str, Any]]) -> str:
        """Run one non-streaming completion and return the assistant text."""
        response = await client.messages.create(
            model=model,
            max_tokens=max_tokens,
            system=system or "",
            messages=messages,
        )
        return _extract_text(response)

    async def _stream(messages: list[dict[str, Any]]) -> AsyncIterator[dict[str, str]]:
        """Run one streaming completion, yielding {"chunk": text} increments."""
        async with client.messages.stream(
            model=model,
            max_tokens=max_tokens,
            system=system or "",
            messages=messages,
        ) as stream:
            async for text in stream.text_stream:
                yield {"chunk": text}

    @agent.invoke  # type: ignore[arg-type]
    async def handle_invoke(input_data: dict[str, Any], ctx: Context) -> dict[str, Any]:
        prompt = _extract_prompt(input_data)
        text = await _complete([{"role": "user", "content": prompt}])
        return {"output": text}

    @agent.invoke_stream  # type: ignore[arg-type]
    async def handle_invoke_stream(
        input_data: dict[str, Any], ctx: Context
    ) -> AsyncIterator[dict[str, str]]:
        prompt = _extract_prompt(input_data)
        async for chunk in _stream([{"role": "user", "content": prompt}]):
            yield chunk

    @agent.chat  # type: ignore[arg-type]
    async def handle_chat(messages: list[dict[str, Any]], ctx: Context) -> dict[str, Any]:
        text = await _complete(_build_messages(messages))
        return {"message": {"role": "assistant", "content": text}}

    @agent.chat_stream  # type: ignore[arg-type]
    async def handle_chat_stream(
        messages: list[dict[str, Any]], ctx: Context
    ) -> AsyncIterator[dict[str, str]]:
        async for chunk in _stream(_build_messages(messages)):
            yield chunk

    return agent