[{"data":1,"prerenderedAt":620},["ShallowReactive",2],{"\u002Fdocs\u002Fintegrations\u002Fllm-providers":3},{"id":4,"title":5,"body":6,"description":613,"extension":614,"meta":615,"navigation":80,"path":616,"seo":617,"stem":618,"__hash__":619},"docs\u002Fdocs\u002F3.integrations\u002F1.llm-providers.md","LLM Providers",{"type":7,"value":8,"toc":607},"minimark",[9,13,22,27,52,151,157,173,177,191,263,267,281,339,343,349,483,489,603],[10,11,5],"h1",{"id":12},"llm-providers",[14,15,16,17,21],"p",{},"Mango supports three LLM providers out of the box. All implement the same ",[18,19,20],"code",{},"LLMService"," interface, so switching providers is a one-line change.",[23,24,26],"h2",{"id":25},"anthropic","Anthropic",[28,29,34],"pre",{"className":30,"code":31,"language":32,"meta":33,"style":33},"language-bash shiki shiki-themes github-dark","pip install mango-ai[anthropic]\n","bash","",[18,35,36],{"__ignoreMap":33},[37,38,41,45,49],"span",{"class":39,"line":40},"line",1,[37,42,44],{"class":43},"svObZ","pip",[37,46,48],{"class":47},"sU2Wk"," install",[37,50,51],{"class":47}," mango-ai[anthropic]\n",[28,53,57],{"className":54,"code":55,"language":56,"meta":33,"style":33},"language-python shiki shiki-themes github-dark","from mango.integrations.anthropic import AnthropicLlmService\n\nllm = AnthropicLlmService(\n    api_key=\"YOUR_KEY\",          # or set ANTHROPIC_API_KEY env var\n    model=\"claude-sonnet-4-6\",   # default\n    max_tokens=4096,\n)\n","python",[18,58,59,75,82,94,113,130,145],{"__ignoreMap":33},[37,60,61,65,69,72],{"class":39,"line":40},[37,62,64],{"class":63},"snl16","from",[37,66,68],{"class":67},"s95oV"," mango.integrations.anthropic ",[37,70,71],{"class":63},"import",[37,73,74],{"class":67}," AnthropicLlmService\n",[37,76,78],{"class":39,"line":77},2,[37,79,81],{"emptyLinePlaceholder":80},true,"\n",[37,83,85,88,91],{"class":39,"line":84},3,[37,86,87],{"class":67},"llm ",[37,89,90],{"class":63},"=",[37,92,93],{"class":67}," AnthropicLlmService(\n",[37,95,97,101,103,106,109],{"class":39,"line":96},4,[37,98,100],{"class":99},"s9osk","    api_key",[37,102,90],{"class":63},[37,104,105],{"class":47},"\"YOUR_KEY\"",[37,107,108],{"class":67},",          ",[37,110,112],{"class":111},"sAwPA","# or set ANTHROPIC_API_KEY env var\n",[37,114,116,119,121,124,127],{"class":39,"line":115},5,[37,117,118],{"class":99},"    model",[37,120,90],{"class":63},[37,122,123],{"class":47},"\"claude-sonnet-4-6\"",[37,125,126],{"class":67},",   ",[37,128,129],{"class":111},"# default\n",[37,131,133,136,138,142],{"class":39,"line":132},6,[37,134,135],{"class":99},"    max_tokens",[37,137,90],{"class":63},[37,139,141],{"class":140},"sDLfK","4096",[37,143,144],{"class":67},",\n",[37,146,148],{"class":39,"line":147},7,[37,149,150],{"class":67},")\n",[14,152,153],{},[154,155,156],"strong",{},"Recommended models:",[158,159,160,167],"ul",{},[161,162,163,166],"li",{},[18,164,165],{},"claude-sonnet-4-6"," — best balance of speed and accuracy (default)",[161,168,169,172],{},[18,170,171],{},"claude-opus-4-6"," — highest accuracy, higher cost",[23,174,176],{"id":175},"openai","OpenAI",[28,178,180],{"className":30,"code":179,"language":32,"meta":33,"style":33},"pip install mango-ai[openai]\n",[18,181,182],{"__ignoreMap":33},[37,183,184,186,188],{"class":39,"line":40},[37,185,44],{"class":43},[37,187,48],{"class":47},[37,189,190],{"class":47}," mango-ai[openai]\n",[28,192,194],{"className":54,"code":193,"language":56,"meta":33,"style":33},"from mango.integrations.openai import OpenAiLlmService\n\nllm = OpenAiLlmService(\n    api_key=\"YOUR_KEY\",          # or set OPENAI_API_KEY env var\n    model=\"gpt-5.4\",             # default\n    max_completion_tokens=4096,\n)\n",[18,195,196,208,212,221,234,248,259],{"__ignoreMap":33},[37,197,198,200,203,205],{"class":39,"line":40},[37,199,64],{"class":63},[37,201,202],{"class":67}," mango.integrations.openai ",[37,204,71],{"class":63},[37,206,207],{"class":67}," OpenAiLlmService\n",[37,209,210],{"class":39,"line":77},[37,211,81],{"emptyLinePlaceholder":80},[37,213,214,216,218],{"class":39,"line":84},[37,215,87],{"class":67},[37,217,90],{"class":63},[37,219,220],{"class":67}," OpenAiLlmService(\n",[37,222,223,225,227,229,231],{"class":39,"line":96},[37,224,100],{"class":99},[37,226,90],{"class":63},[37,228,105],{"class":47},[37,230,108],{"class":67},[37,232,233],{"class":111},"# or set OPENAI_API_KEY env var\n",[37,235,236,238,240,243,246],{"class":39,"line":115},[37,237,118],{"class":99},[37,239,90],{"class":63},[37,241,242],{"class":47},"\"gpt-5.4\"",[37,244,245],{"class":67},",             ",[37,247,129],{"class":111},[37,249,250,253,255,257],{"class":39,"line":132},[37,251,252],{"class":99},"    max_completion_tokens",[37,254,90],{"class":63},[37,256,141],{"class":140},[37,258,144],{"class":67},[37,260,261],{"class":39,"line":147},[37,262,150],{"class":67},[23,264,266],{"id":265},"google","Google",[28,268,270],{"className":30,"code":269,"language":32,"meta":33,"style":33},"pip install mango-ai[gemini]\n",[18,271,272],{"__ignoreMap":33},[37,273,274,276,278],{"class":39,"line":40},[37,275,44],{"class":43},[37,277,48],{"class":47},[37,279,280],{"class":47}," mango-ai[gemini]\n",[28,282,284],{"className":54,"code":283,"language":56,"meta":33,"style":33},"from mango.integrations.google import GeminiLlmService\n\nllm = GeminiLlmService(\n    api_key=\"YOUR_KEY\",          # or set GOOGLE_API_KEY env var\n    model=\"gemini-3.1-pro-preview\",\n)\n",[18,285,286,298,302,311,324,335],{"__ignoreMap":33},[37,287,288,290,293,295],{"class":39,"line":40},[37,289,64],{"class":63},[37,291,292],{"class":67}," mango.integrations.google ",[37,294,71],{"class":63},[37,296,297],{"class":67}," GeminiLlmService\n",[37,299,300],{"class":39,"line":77},[37,301,81],{"emptyLinePlaceholder":80},[37,303,304,306,308],{"class":39,"line":84},[37,305,87],{"class":67},[37,307,90],{"class":63},[37,309,310],{"class":67}," GeminiLlmService(\n",[37,312,313,315,317,319,321],{"class":39,"line":96},[37,314,100],{"class":99},[37,316,90],{"class":63},[37,318,105],{"class":47},[37,320,108],{"class":67},[37,322,323],{"class":111},"# or set GOOGLE_API_KEY env var\n",[37,325,326,328,330,333],{"class":39,"line":115},[37,327,118],{"class":99},[37,329,90],{"class":63},[37,331,332],{"class":47},"\"gemini-3.1-pro-preview\"",[37,334,144],{"class":67},[37,336,337],{"class":39,"line":132},[37,338,150],{"class":67},[23,340,342],{"id":341},"custom-llm-provider","Custom LLM provider",[14,344,345,346,348],{},"Implement ",[18,347,20],{}," to use any LLM with tool\u002Ffunction calling support:",[28,350,352],{"className":54,"code":351,"language":56,"meta":33,"style":33},"from mango.llm import LLMService, LLMResponse, Message, ToolDef\n\nclass MyCustomLlm(LLMService):\n    def chat(\n        self,\n        messages: list[Message],\n        tools: list[ToolDef],\n        system_prompt: str = \"\",\n    ) -> LLMResponse:\n        # Call your LLM API here\n        # Return LLMResponse with text and\u002For tool_calls\n        ...\n\n    def get_model_name(self) -> str:\n        return \"my-custom-model\"\n",[18,353,354,366,370,386,397,402,407,412,429,435,441,447,453,458,474],{"__ignoreMap":33},[37,355,356,358,361,363],{"class":39,"line":40},[37,357,64],{"class":63},[37,359,360],{"class":67}," mango.llm ",[37,362,71],{"class":63},[37,364,365],{"class":67}," LLMService, LLMResponse, Message, ToolDef\n",[37,367,368],{"class":39,"line":77},[37,369,81],{"emptyLinePlaceholder":80},[37,371,372,375,378,381,383],{"class":39,"line":84},[37,373,374],{"class":63},"class",[37,376,377],{"class":43}," MyCustomLlm",[37,379,380],{"class":67},"(",[37,382,20],{"class":43},[37,384,385],{"class":67},"):\n",[37,387,388,391,394],{"class":39,"line":96},[37,389,390],{"class":63},"    def",[37,392,393],{"class":43}," chat",[37,395,396],{"class":67},"(\n",[37,398,399],{"class":39,"line":115},[37,400,401],{"class":67},"        self,\n",[37,403,404],{"class":39,"line":132},[37,405,406],{"class":67},"        messages: list[Message],\n",[37,408,409],{"class":39,"line":147},[37,410,411],{"class":67},"        tools: list[ToolDef],\n",[37,413,415,418,421,424,427],{"class":39,"line":414},8,[37,416,417],{"class":67},"        system_prompt: ",[37,419,420],{"class":140},"str",[37,422,423],{"class":63}," =",[37,425,426],{"class":47}," \"\"",[37,428,144],{"class":67},[37,430,432],{"class":39,"line":431},9,[37,433,434],{"class":67},"    ) -> LLMResponse:\n",[37,436,438],{"class":39,"line":437},10,[37,439,440],{"class":111},"        # Call your LLM API here\n",[37,442,444],{"class":39,"line":443},11,[37,445,446],{"class":111},"        # Return LLMResponse with text and\u002For tool_calls\n",[37,448,450],{"class":39,"line":449},12,[37,451,452],{"class":140},"        ...\n",[37,454,456],{"class":39,"line":455},13,[37,457,81],{"emptyLinePlaceholder":80},[37,459,461,463,466,469,471],{"class":39,"line":460},14,[37,462,390],{"class":63},[37,464,465],{"class":43}," get_model_name",[37,467,468],{"class":67},"(self) -> ",[37,470,420],{"class":140},[37,472,473],{"class":67},":\n",[37,475,477,480],{"class":39,"line":476},15,[37,478,479],{"class":63},"        return",[37,481,482],{"class":47}," \"my-custom-model\"\n",[14,484,485,488],{},[18,486,487],{},"LLMResponse"," fields:",[490,491,492,508],"table",{},[493,494,495],"thead",{},[496,497,498,502,505],"tr",{},[499,500,501],"th",{},"Field",[499,503,504],{},"Type",[499,506,507],{},"Description",[509,510,511,527,542,556,571,585],"tbody",{},[496,512,513,519,524],{},[514,515,516],"td",{},[18,517,518],{},"text",[514,520,521],{},[18,522,523],{},"str | None",[514,525,526],{},"Text content of the response",[496,528,529,534,539],{},[514,530,531],{},[18,532,533],{},"tool_calls",[514,535,536],{},[18,537,538],{},"list[ToolCall]",[514,540,541],{},"Tool calls requested by the LLM",[496,543,544,549,553],{},[514,545,546],{},[18,547,548],{},"model",[514,550,551],{},[18,552,420],{},[514,554,555],{},"Model name as returned by the provider",[496,557,558,563,568],{},[514,559,560],{},[18,561,562],{},"input_tokens",[514,564,565],{},[18,566,567],{},"int",[514,569,570],{},"Input tokens used",[496,572,573,578,582],{},[514,574,575],{},[18,576,577],{},"output_tokens",[514,579,580],{},[18,581,567],{},[514,583,584],{},"Output tokens used",[496,586,587,592,597],{},[514,588,589],{},[18,590,591],{},"has_tool_calls",[514,593,594],{},[18,595,596],{},"bool",[514,598,599,600],{},"Property: ",[18,601,602],{},"len(tool_calls) > 0",[604,605,606],"style",{},"html pre.shiki code .svObZ, html code.shiki .svObZ{--shiki-default:#B392F0}html pre.shiki code .sU2Wk, html code.shiki .sU2Wk{--shiki-default:#9ECBFF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html pre.shiki code .snl16, html code.shiki .snl16{--shiki-default:#F97583}html pre.shiki code .s95oV, html code.shiki .s95oV{--shiki-default:#E1E4E8}html pre.shiki code .s9osk, html code.shiki .s9osk{--shiki-default:#FFAB70}html pre.shiki code .sAwPA, html code.shiki .sAwPA{--shiki-default:#6A737D}html pre.shiki code .sDLfK, html code.shiki .sDLfK{--shiki-default:#79B8FF}",{"title":33,"searchDepth":77,"depth":77,"links":608},[609,610,611,612],{"id":25,"depth":77,"text":26},{"id":175,"depth":77,"text":176},{"id":265,"depth":77,"text":266},{"id":341,"depth":77,"text":342},"Anthropic, OpenAI, and Google Gemini — how to configure each.","md",{},"\u002Fdocs\u002Fintegrations\u002Fllm-providers",{"title":5,"description":613},"docs\u002F3.integrations\u002F1.llm-providers","IL4b5C7nfHYzsNFQoysfHx0OKUort6OgH7MGSv14rTQ",1776189331996]