LLMService
from mango.llm import LLMService
Abstract base class for all LLM providers. Subclass it to integrate any LLM that supports tool/function calling.
Interface
class LLMService(ABC):
    """Abstract interface that every LLM provider must implement."""

    @abstractmethod
    def chat(
        self,
        messages: list[Message],
        tools: list[ToolDef],
        system_prompt: str = "",
    ) -> LLMResponse:
        """Send a conversation to the model and return its response.

        Args:
            messages: The conversation history so far.
            tools: Tool definitions the model may call.
            system_prompt: Optional system instructions (empty by default).

        Returns:
            The model's reply, including any tool calls and token usage.
        """
        ...

    @abstractmethod
    def get_model_name(self) -> str:
        """Return the identifier of the underlying model."""
        ...
Data types
Message
@dataclass
class Message:
    """One turn in a conversation with the model.

    Attributes:
        role: One of "user", "assistant", or "tool".
        content: Plain text, or a list of structured content blocks.
        tool_call_id: Must be set when role is "tool"; otherwise None.
    """

    role: str
    content: str | list
    tool_call_id: str | None = None
ToolDef
@dataclass
class ToolDef:
    """Declaration of a callable tool exposed to the model.

    Attributes:
        name: Tool identifier the model uses to invoke it.
        description: What the tool does, written for the model.
        params: Schema of the tool's parameters.
    """

    name: str
    description: str
    params: list[ToolParam]
ToolParam
@dataclass
class ToolParam:
    """Schema for a single tool parameter.

    Attributes:
        type: JSON-schema type name: "string" | "integer" | "number" |
            "boolean" | "array" | "object".
        required: Whether the parameter must be supplied (defaults to True).
        enum: Optional closed set of allowed values.
        items: Item schema, used when type is "array".
    """

    name: str
    type: str
    description: str
    required: bool = True
    enum: list | None = None
    items: dict | None = None
LLMResponse
@dataclass
class LLMResponse:
    """A single model response, including token-usage accounting.

    Attributes:
        text: The assistant's text reply, or None if the model emitted
            only tool calls.
        tool_calls: Tool invocations requested by the model (may be empty).
        model: Identifier of the model that produced this response.
        input_tokens: Tokens consumed by the prompt.
        output_tokens: Tokens generated in the response.
    """

    text: str | None
    tool_calls: list[ToolCall]
    model: str
    input_tokens: int
    output_tokens: int

    @property
    def has_tool_calls(self) -> bool:
        """True when the model requested at least one tool call."""
        # Idiomatic truthiness check instead of len(...) > 0; bool() keeps
        # the declared return type exact.
        return bool(self.tool_calls)
ToolCall
@dataclass
class ToolCall:
    """A tool invocation requested by the model.

    Attributes:
        tool_name: Name of the tool to invoke.
        tool_args: Arguments for the call, keyed by parameter name.
        tool_call_id: Provider-assigned id, echoed back with the tool result.
        thought_signature: Opaque value that Gemini 3 requires to be
            round-tripped with the call; None for other providers.
    """

    tool_name: str
    tool_args: dict
    tool_call_id: str
    thought_signature: str | None = None
Built-in implementations
| Class | Import |
|---|---|
| `AnthropicLlmService` | `mango.integrations.anthropic` |
| `OpenAiLlmService` | `mango.integrations.openai` |
| `GeminiLlmService` | `mango.integrations.google` |
Custom implementation
from mango.llm import LLMService, LLMResponse, Message, ToolDef, ToolCall
class MyLlmService(LLMService):
    """Skeleton of a custom provider implementation."""

    def chat(
        self,
        messages: list[Message],
        tools: list[ToolDef],
        system_prompt: str = "",
    ) -> LLMResponse:
        """Translate to the provider wire format, call the API, parse the reply."""
        # Convert to your provider's format
        # Call API
        # Parse response
        return LLMResponse(
            text="...",
            tool_calls=[ToolCall(...)],
            model="my-model",
            input_tokens=100,
            output_tokens=50,
        )

    def get_model_name(self) -> str:
        """Return the identifier of the underlying model."""
        return "my-model-v1"