Skip to content

Commit 2f14acf

Browse files
release: 0.35.0 (#257)
Co-authored-by: stainless-app[bot] <142633134+stainless-app[bot]@users.noreply.github.com>
Co-authored-by: Graden Rea <grea@groq.com>
1 parent 63d11e3 commit 2f14acf

File tree

8 files changed

+190
-16
lines changed

8 files changed

+190
-16
lines changed

.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
{
2-
".": "0.34.1"
2+
".": "0.35.0"
33
}

.stats.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 17
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-5d2697ad30ccbbd1502ade612deb9221a1e05e70d09a156da743d0ca120942f9.yml
3-
openapi_spec_hash: 0bde05b560fbd40711e91e6b562c3b0f
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-b059caa2a4d4cc23663f61d2ec0f83a4b0e4eda7ed1f2dbbb129c5d320811200.yml
3+
openapi_spec_hash: 87ccaae4d281259575c961b9cd52766e
44
config_hash: a4a6c3089a2e53425351cc9f42b4b5aa

CHANGELOG.md

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,24 @@
11
# Changelog
22

3+
## 0.35.0 (2025-11-18)
4+
5+
Full Changelog: [v0.34.1...v0.35.0](https://github.com/groq/groq-python/compare/v0.34.1...v0.35.0)
6+
7+
### Features
8+
9+
* **api:** api update ([d6ec93d](https://github.com/groq/groq-python/commit/d6ec93d000702baf001b1a5739489d8d5fd7b7da))
10+
* **api:** api update ([48c8f11](https://github.com/groq/groq-python/commit/48c8f11f014207d9665b1eacce3f97461265b12d))
11+
12+
13+
### Bug Fixes
14+
15+
* update chat completion streaming types ([833cf83](https://github.com/groq/groq-python/commit/833cf83b5f2f59eaa17507c330be90a722a871e8))
16+
17+
18+
### Chores
19+
20+
* GitHub Terraform: Create/Update .github/workflows/code-freeze-bypass.yaml [skip ci] ([63d11e3](https://github.com/groq/groq-python/commit/63d11e39fec78e040a95e6e3fe398fe6ca9e1fc2))
21+
322
## 0.34.1 (2025-11-14)
423

524
Full Changelog: [v0.34.0...v0.34.1](https://github.com/groq/groq-python/compare/v0.34.0...v0.34.1)

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "groq"
3-
version = "0.34.1"
3+
version = "0.35.0"
44
description = "The official Python library for the groq API"
55
dynamic = ["readme"]
66
license = "Apache-2.0"

src/groq/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

33
__title__ = "groq"
4-
__version__ = "0.34.1" # x-release-please-version
4+
__version__ = "0.35.0" # x-release-please-version

src/groq/types/chat/chat_completion.py

Lines changed: 95 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,25 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

3-
from typing import List, Optional
3+
from typing import Dict, List, Optional
44
from typing_extensions import Literal
55

66
from ..._models import BaseModel
77
from ..completion_usage import CompletionUsage
88
from .chat_completion_message import ChatCompletionMessage
99
from .chat_completion_token_logprob import ChatCompletionTokenLogprob
1010

11-
__all__ = ["ChatCompletion", "Choice", "ChoiceLogprobs", "UsageBreakdown", "UsageBreakdownModel"]
11+
__all__ = [
12+
"ChatCompletion",
13+
"Choice",
14+
"ChoiceLogprobs",
15+
"McpListTool",
16+
"McpListToolTool",
17+
"UsageBreakdown",
18+
"UsageBreakdownModel",
19+
"XGroq",
20+
"XGroqDebug",
21+
"XGroqUsage",
22+
]
1223

1324

1425
class ChoiceLogprobs(BaseModel):
@@ -36,6 +47,34 @@ class Choice(BaseModel):
3647
"""A chat completion message generated by the model."""
3748

3849

50+
class McpListToolTool(BaseModel):
51+
annotations: Optional[object] = None
52+
"""Additional metadata for the tool."""
53+
54+
description: Optional[str] = None
55+
"""Description of what the tool does."""
56+
57+
input_schema: Optional[Dict[str, object]] = None
58+
"""JSON Schema describing the tool's input parameters."""
59+
60+
name: Optional[str] = None
61+
"""The name of the tool."""
62+
63+
64+
class McpListTool(BaseModel):
65+
id: Optional[str] = None
66+
"""Unique identifier for this tool list response."""
67+
68+
server_label: Optional[str] = None
69+
"""Human-readable label for the MCP server."""
70+
71+
tools: Optional[List[McpListToolTool]] = None
72+
"""Array of discovered tools from the server."""
73+
74+
type: Optional[str] = None
75+
"""The type identifier."""
76+
77+
3978
class UsageBreakdownModel(BaseModel):
4079
model: str
4180
"""The name/identifier of the model used"""
@@ -49,6 +88,51 @@ class UsageBreakdown(BaseModel):
4988
"""List of models used in the request and their individual usage statistics"""
5089

5190

91+
class XGroqDebug(BaseModel):
92+
input_token_ids: Optional[List[int]] = None
93+
"""Token IDs for the input."""
94+
95+
input_tokens: Optional[List[str]] = None
96+
"""Token strings for the input."""
97+
98+
output_token_ids: Optional[List[int]] = None
99+
"""Token IDs for the output."""
100+
101+
output_tokens: Optional[List[str]] = None
102+
"""Token strings for the output."""
103+
104+
105+
class XGroqUsage(BaseModel):
106+
dram_cached_tokens: Optional[int] = None
107+
"""Number of tokens served from DRAM cache."""
108+
109+
sram_cached_tokens: Optional[int] = None
110+
"""Number of tokens served from SRAM cache."""
111+
112+
113+
class XGroq(BaseModel):
114+
id: str
115+
"""
116+
A groq request ID which can be used to refer to a specific request to groq
117+
support.
118+
"""
119+
120+
debug: Optional[XGroqDebug] = None
121+
"""Debug information including input and output token IDs and strings.
122+
123+
Only present when debug=true in the request.
124+
"""
125+
126+
seed: Optional[int] = None
127+
"""The seed used for the request.
128+
129+
See the seed property on CreateChatCompletionRequest for more details.
130+
"""
131+
132+
usage: Optional[XGroqUsage] = None
133+
"""Additional Groq-specific usage metrics (hardware cache statistics)."""
134+
135+
52136
class ChatCompletion(BaseModel):
53137
id: str
54138
"""A unique identifier for the chat completion."""
@@ -68,6 +152,12 @@ class ChatCompletion(BaseModel):
68152
object: Literal["chat.completion"]
69153
"""The object type, which is always `chat.completion`."""
70154

155+
mcp_list_tools: Optional[List[McpListTool]] = None
156+
"""List of discovered MCP tools from connected servers."""
157+
158+
service_tier: Optional[Literal["auto", "on_demand", "flex", "performance"]] = None
159+
"""The service tier used for the request."""
160+
71161
system_fingerprint: Optional[str] = None
72162
"""This fingerprint represents the backend configuration that the model runs with.
73163
@@ -83,3 +173,6 @@ class ChatCompletion(BaseModel):
83173
Detailed usage breakdown by model when multiple models are used in the request
84174
for compound AI systems.
85175
"""
176+
177+
x_groq: Optional[XGroq] = None
178+
"""Groq-specific metadata for non-streaming chat completion responses."""

src/groq/types/chat/chat_completion_chunk.py

Lines changed: 54 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,10 @@
1717
"ChoiceDeltaToolCall",
1818
"ChoiceDeltaToolCallFunction",
1919
"ChoiceLogprobs",
20+
"XGroq",
21+
"XGroqDebug",
22+
"UsageBreakdown",
23+
"UsageBreakdownModel",
2024
]
2125

2226

@@ -98,6 +102,33 @@ class ChoiceLogprobs(BaseModel):
98102
"""A list of message content tokens with log probability information."""
99103

100104

105+
class UsageBreakdownModel(BaseModel):
106+
model: str
107+
"""The name/identifier of the model used"""
108+
109+
usage: CompletionUsage
110+
"""Usage statistics for the completion request."""
111+
112+
113+
class UsageBreakdown(BaseModel):
114+
models: List[UsageBreakdownModel]
115+
"""List of models used in the request and their individual usage statistics"""
116+
117+
118+
class XGroqDebug(BaseModel):
119+
input_token_ids: Optional[List[int]] = None
120+
"""Token IDs for the input."""
121+
122+
input_tokens: Optional[List[str]] = None
123+
"""Token strings for the input."""
124+
125+
output_token_ids: Optional[List[int]] = None
126+
"""Token IDs for the output."""
127+
128+
output_tokens: Optional[List[str]] = None
129+
"""Token strings for the output."""
130+
131+
101132
class Choice(BaseModel):
102133
delta: ChoiceDelta
103134
"""A chat completion delta generated by streamed model responses."""
@@ -120,17 +151,32 @@ class Choice(BaseModel):
120151

121152

122153
class XGroq(BaseModel):
123-
id: Optional[str]
154+
id: Optional[str] = None
155+
"""
156+
A groq request ID which can be used to refer to a specific request to groq support.
157+
Sent only in the first and final chunk.
124158
"""
125-
A groq request ID which can be used by to refer to a specific request to groq support
126-
Only sent with the first chunk
159+
160+
debug: Optional[XGroqDebug] = None
161+
"""Debug information including input and output token IDs and strings.
162+
163+
Only present when debug=true in the request.
127164
"""
128165

129-
usage: Optional[CompletionUsage]
130-
"""Usage information for the stream. Only sent in the final chunk"""
166+
seed: Optional[int] = None
167+
"""The seed used for the request. Sent in the final chunk."""
168+
169+
usage: Optional[CompletionUsage] = None
170+
"""Usage information for the stream. Only sent in the final chunk."""
171+
172+
usage_breakdown: Optional[UsageBreakdown] = None
173+
"""
174+
Detailed usage breakdown by model when multiple models are used in the request
175+
for compound AI systems. Only sent in the final chunk.
176+
"""
131177

132-
error: Optional[str]
133-
""" An error string indicating why a stream was stopped early"""
178+
error: Optional[str] = None
179+
"""An error string indicating why a stream was stopped early."""
134180

135181

136182
class ChatCompletionChunk(BaseModel):
@@ -171,7 +217,7 @@ class ChatCompletionChunk(BaseModel):
171217
statistics for the entire request.
172218
"""
173219

174-
x_groq: Optional[XGroq]
220+
x_groq: Optional[XGroq] = None
175221
"""
176222
Additional metadata provided by groq.
177223
"""

src/groq/types/completion_usage.py

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,17 @@
44

55
from .._models import BaseModel
66

7-
__all__ = ["CompletionUsage"]
7+
__all__ = ["CompletionUsage", "CompletionTokensDetails", "PromptTokensDetails"]
8+
9+
10+
class CompletionTokensDetails(BaseModel):
11+
reasoning_tokens: int
12+
"""Number of tokens used for reasoning (for reasoning models)."""
13+
14+
15+
class PromptTokensDetails(BaseModel):
16+
cached_tokens: int
17+
"""Number of tokens that were cached and reused."""
818

919

1020
class CompletionUsage(BaseModel):
@@ -20,9 +30,15 @@ class CompletionUsage(BaseModel):
2030
completion_time: Optional[float] = None
2131
"""Time spent generating tokens"""
2232

33+
completion_tokens_details: Optional[CompletionTokensDetails] = None
34+
"""Breakdown of tokens in the completion."""
35+
2336
prompt_time: Optional[float] = None
2437
"""Time spent processing input tokens"""
2538

39+
prompt_tokens_details: Optional[PromptTokensDetails] = None
40+
"""Breakdown of tokens in the prompt."""
41+
2642
queue_time: Optional[float] = None
2743
"""Time the requests was spent queued"""
2844

0 commit comments

Comments (0)