@@ -1,14 +1,25 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
-from typing import List, Optional
+from typing import Dict, List, Optional
 from typing_extensions import Literal
 
 from ..._models import BaseModel
 from ..completion_usage import CompletionUsage
 from .chat_completion_message import ChatCompletionMessage
 from .chat_completion_token_logprob import ChatCompletionTokenLogprob
 
-__all__ = ["ChatCompletion", "Choice", "ChoiceLogprobs", "UsageBreakdown", "UsageBreakdownModel"]
+__all__ = [
+    "ChatCompletion",
+    "Choice",
+    "ChoiceLogprobs",
+    "McpListTool",
+    "McpListToolTool",
+    "UsageBreakdown",
+    "UsageBreakdownModel",
+    "XGroq",
+    "XGroqDebug",
+    "XGroqUsage",
+]
 
 
 class ChoiceLogprobs(BaseModel):
@@ -36,6 +47,34 @@ class Choice(BaseModel):
3647 """A chat completion message generated by the model."""
3748
3849
50+ class McpListToolTool (BaseModel ):
51+ annotations : Optional [object ] = None
52+ """Additional metadata for the tool."""
53+
54+ description : Optional [str ] = None
55+ """Description of what the tool does."""
56+
57+ input_schema : Optional [Dict [str , object ]] = None
58+ """JSON Schema describing the tool's input parameters."""
59+
60+ name : Optional [str ] = None
61+ """The name of the tool."""
62+
63+
64+ class McpListTool (BaseModel ):
65+ id : Optional [str ] = None
66+ """Unique identifier for this tool list response."""
67+
68+ server_label : Optional [str ] = None
69+ """Human-readable label for the MCP server."""
70+
71+ tools : Optional [List [McpListToolTool ]] = None
72+ """Array of discovered tools from the server."""
73+
74+ type : Optional [str ] = None
75+ """The type identifier."""
76+
77+
3978class UsageBreakdownModel (BaseModel ):
4079 model : str
4180 """The name/identifier of the model used"""
@@ -49,6 +88,51 @@ class UsageBreakdown(BaseModel):
4988 """List of models used in the request and their individual usage statistics"""
5089
5190
91+ class XGroqDebug (BaseModel ):
92+ input_token_ids : Optional [List [int ]] = None
93+ """Token IDs for the input."""
94+
95+ input_tokens : Optional [List [str ]] = None
96+ """Token strings for the input."""
97+
98+ output_token_ids : Optional [List [int ]] = None
99+ """Token IDs for the output."""
100+
101+ output_tokens : Optional [List [str ]] = None
102+ """Token strings for the output."""
103+
104+
105+ class XGroqUsage (BaseModel ):
106+ dram_cached_tokens : Optional [int ] = None
107+ """Number of tokens served from DRAM cache."""
108+
109+ sram_cached_tokens : Optional [int ] = None
110+ """Number of tokens served from SRAM cache."""
111+
112+
113+ class XGroq (BaseModel ):
114+ id : str
115+ """
116+ A groq request ID which can be used to refer to a specific request to groq
117+ support.
118+ """
119+
120+ debug : Optional [XGroqDebug ] = None
121+ """Debug information including input and output token IDs and strings.
122+
123+ Only present when debug=true in the request.
124+ """
125+
126+ seed : Optional [int ] = None
127+ """The seed used for the request.
128+
129+ See the seed property on CreateChatCompletionRequest for more details.
130+ """
131+
132+ usage : Optional [XGroqUsage ] = None
133+ """Additional Groq-specific usage metrics (hardware cache statistics)."""
134+
135+
52136class ChatCompletion (BaseModel ):
53137 id : str
54138 """A unique identifier for the chat completion."""
@@ -68,6 +152,12 @@ class ChatCompletion(BaseModel):
     object: Literal["chat.completion"]
     """The object type, which is always `chat.completion`."""
 
+    mcp_list_tools: Optional[List[McpListTool]] = None
+    """List of discovered MCP tools from connected servers."""
+
+    service_tier: Optional[Literal["auto", "on_demand", "flex", "performance"]] = None
+    """The service tier used for the request."""
+
     system_fingerprint: Optional[str] = None
     """This fingerprint represents the backend configuration that the model runs with.
 
@@ -83,3 +173,6 @@ class ChatCompletion(BaseModel):
     Detailed usage breakdown by model when multiple models are used in the request
     for compound AI systems.
     """
+
+    x_groq: Optional[XGroq] = None
+    """Groq-specific metadata for non-streaming chat completion responses."""