# Source code for reminix.client.resources.agents
"""Auto-generated operation classes.
This file is auto-generated by the operators generator.
Do not edit this file manually.
"""
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from reminix.client.types import (
ChatRequest,
ChatResponse,
InvokeRequest,
InvokeResponse
)
from reminix.client.utils.pagination import PaginatedResponse
if TYPE_CHECKING:
from reminix.client.client import Client
class Agents:
    """Operations for agents."""

    def __init__(self, client: "Client") -> None:
        """Initialize Agents operations.

        Args:
            client: The Reminix client instance used to issue HTTP requests.
        """
        self._client = client

    async def invoke(
        self,
        name: str,
        request: InvokeRequest
    ) -> InvokeResponse:
        """Invoke an agent.

        Execute a one-shot task with an agent. This endpoint is designed for
        task-oriented operations where you provide input and receive a complete
        output.

        **Use cases:**

        - Data analysis and processing
        - Content generation (with streaming for long outputs)
        - One-time operations that don't require conversation history
        - API-like operations

        **Streaming:** Set ``stream: true`` in the request body to receive
        Server-Sent Events (SSE) stream with incremental chunks. Useful for
        long-running tasks or real-time progress updates.

        Args:
            name: Unique, URL-safe agent name within the project
            request: InvokeRequest object

        Returns:
            InvokeResponse: Agent invocation successful

        Example::

            request = InvokeRequest(
                input={},
                stream=False,
                context="example"
            )
            result = await client.agents.invoke(
                "example",
                request
            )
            print(result)

        Raises:
            APIError: Bad Request - Invalid request body
            AuthenticationError: Unauthorized - Invalid or missing API key
            APIError: Agent or deployment not found
            APIError: Internal Server Error
            NetworkError: If a network error occurs
        """
        # exclude_none=True keeps optional, unset fields out of the JSON payload.
        response = await self._client.request(
            "POST",
            f"/agents/{name}/invoke",
            json=request.model_dump(mode='json', exclude_none=True),
        )
        return InvokeResponse(**response)

    async def chat(
        self,
        name: str,
        request: ChatRequest
    ) -> ChatResponse:
        """Chat with an agent.

        Have a conversational interaction with an agent. This endpoint
        maintains conversation context through the messages array, allowing for
        multi-turn conversations.

        **Use cases:**

        - Customer support chatbots
        - AI assistants with memory
        - Multi-step question answering
        - Conversational agents that need context

        **Message format:** Follows OpenAI-compliant message structure with
        support for:

        - ``system``, ``user``, ``assistant``, and ``tool`` roles
        - Multimodal content (text + images)
        - Tool/function calling

        **Streaming:** Set ``stream: true`` in the request body to receive
        Server-Sent Events (SSE) stream with incremental chunks. Perfect for
        ChatGPT-like real-time chat interfaces.

        Args:
            name: Unique, URL-safe agent name within the project
            request: ChatRequest object

        Returns:
            ChatResponse: Chat response successful

        Example::

            request = ChatRequest(
                messages=[],
                stream=False,
                context="example"
            )
            result = await client.agents.chat(
                "example",
                request
            )
            print(result)

        Raises:
            APIError: Bad Request - Invalid request body
            AuthenticationError: Unauthorized - Invalid or missing API key
            APIError: Agent or deployment not found
            APIError: Internal Server Error
            NetworkError: If a network error occurs
        """
        # exclude_none=True keeps optional, unset fields out of the JSON payload.
        response = await self._client.request(
            "POST",
            f"/agents/{name}/chat",
            json=request.model_dump(mode='json', exclude_none=True),
        )
        return ChatResponse(**response)