Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,8 @@ jobs:
test: "*.TestThreads"
- name: Vector Stores
test: "*.TestVectorStores"
- name: Responses
test: "*.TestResponses"
- name: Misc.
test: "*.misc.*"
steps:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import kotlin.time.Duration.Companion.seconds
* OpenAI API.
*/
public interface OpenAI : Completions, Files, Edits, Embeddings, Models, Moderations, FineTunes, Images, Chat, Audio,
FineTuning, Assistants, Threads, Runs, Messages, VectorStores, Batch, AutoCloseable
FineTuning, Assistants, Threads, Runs, Messages, VectorStores, Batch, Responses, AutoCloseable

/**
* Creates an instance of [OpenAI].
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
package com.aallam.openai.client

import com.aallam.openai.api.core.RequestOptions
import com.aallam.openai.api.responses.Response
import com.aallam.openai.api.responses.ResponseIncludable
import com.aallam.openai.api.responses.ResponseItem
import com.aallam.openai.api.responses.ResponseRequest

/**
 * Interface for OpenAI's Responses API.
 */
public interface Responses {

    /**
     * Create a new response.
     *
     * @param request The request for creating a response
     * @param requestOptions Optional request configuration
     * @return The created response
     */
    public suspend fun createResponse(
        request: ResponseRequest,
        requestOptions: RequestOptions? = null
    ): Response

    /**
     * Retrieves a model response with the given ID.
     *
     * @param responseId The ID of the response to retrieve
     * @param include Additional fields to include in the response.
     * @param requestOptions Optional request configuration
     * @return The retrieved response
     */
    public suspend fun getResponse(
        responseId: String,
        include: List<ResponseIncludable>? = null,
        requestOptions: RequestOptions? = null
    ): Response

    /**
     * Deletes a model response with the given ID.
     *
     * @param responseId The ID of the response to delete
     * @param requestOptions Optional request configuration
     * @return `true` if the response was deleted, `false` otherwise
     */
    public suspend fun deleteResponse(
        responseId: String,
        requestOptions: RequestOptions? = null
    ): Boolean

    /**
     * Cancels a model response with the given ID.
     *
     * Only responses created with the background parameter set to true can be cancelled.
     *
     * @param responseId The ID of the response to cancel
     * @param requestOptions Optional request configuration
     * @return The cancelled response
     */
    public suspend fun cancelResponse(
        responseId: String,
        requestOptions: RequestOptions? = null
    ): Response

    /**
     * Returns a list of input items for a given response.
     *
     * @param responseId The ID of the response
     * @param after An item ID to list items after, used in pagination.
     * @param before An item ID to list items before, used in pagination.
     * @param include Additional fields to include in the response.
     * @param limit A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20.
     * @param order The order to return the input items in. Can be either "asc" or "desc". Default is "desc".
     * @param requestOptions Optional request configuration
     * @return The input items of the response
     */
    public suspend fun listInputItems(
        responseId: String,
        after: String? = null,
        before: String? = null,
        include: List<ResponseIncludable>? = null,
        limit: Int? = null,
        order: String? = null,
        requestOptions: RequestOptions? = null
    ): List<ResponseItem>

    // TODO: Streaming variant of createResponse is not yet exposed.
}
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,5 @@ internal class OpenAIApi(
Messages by MessagesApi(requester),
VectorStores by VectorStoresApi(requester),
Batch by BatchApi(requester),
Responses by ResponsesApi(requester),
AutoCloseable by requester
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,5 @@ internal object ApiPath {
const val Threads = "threads"
const val VectorStores = "vector_stores"
const val Batches = "batches"
const val Responses = "responses"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
package com.aallam.openai.client.internal.api

import com.aallam.openai.api.core.DeleteResponse
import com.aallam.openai.api.core.ListResponse
import com.aallam.openai.api.core.RequestOptions
import com.aallam.openai.api.responses.Response
import com.aallam.openai.api.responses.ResponseIncludable
import com.aallam.openai.api.responses.ResponseItem
import com.aallam.openai.api.responses.ResponseRequest
import com.aallam.openai.client.Responses
import com.aallam.openai.client.internal.extension.requestOptions
import com.aallam.openai.client.internal.http.HttpRequester
import com.aallam.openai.client.internal.http.perform

import io.ktor.client.*
import io.ktor.client.call.*
import io.ktor.client.request.*
import io.ktor.client.statement.*
import io.ktor.http.*

/**
 * Implementation of [Responses] backed by an [HttpRequester].
 */
internal class ResponsesApi(private val requester: HttpRequester) : Responses {

    /**
     * Creates a model response.
     *
     * Streaming is forced off (`stream` is overwritten to `false`) because this call
     * deserializes a single [Response] body; see the streaming TODO below.
     */
    override suspend fun createResponse(request: ResponseRequest, requestOptions: RequestOptions?): Response {
        return requester.perform { client: HttpClient ->
            client.post {
                url(path = ApiPath.Responses)
                setBody(request.copy(stream = false))
                contentType(ContentType.Application.Json)
                requestOptions(requestOptions)
            }.body()
        }
    }

    /**
     * Retrieves the response identified by [responseId].
     *
     * @param include additional fields to request; sent as repeated `include` query parameters.
     */
    override suspend fun getResponse(
        responseId: String,
        include: List<ResponseIncludable>?,
        requestOptions: RequestOptions?
    ): Response {
        return requester.perform { client: HttpClient ->
            client.get {
                url(path = "${ApiPath.Responses}/$responseId")
                // Append one query parameter per element: passing the List itself would be
                // serialized via its toString() (e.g. "[a, b]"), which the API won't accept.
                // NOTE(review): relies on ResponseIncludable's string form matching the raw
                // API value — confirm against its serializer.
                include?.forEach { includable -> parameter("include", includable) }
                requestOptions(requestOptions)
            }.body()
        }
    }

    /**
     * Deletes the response identified by [responseId].
     *
     * @return `true` if the API reports the response as deleted; `false` when the
     * response does not exist (HTTP 404).
     */
    override suspend fun deleteResponse(responseId: String, requestOptions: RequestOptions?): Boolean {
        val response = requester.perform<HttpResponse> {
            it.delete {
                url(path = "${ApiPath.Responses}/$responseId")
                requestOptions(requestOptions)
            }
        }

        return when (response.status) {
            // A missing response is reported as "not deleted" rather than raised as an error.
            HttpStatusCode.NotFound -> false
            else -> response.body<DeleteResponse>().deleted
        }
    }

    /**
     * Cancels an in-progress background response identified by [responseId].
     */
    override suspend fun cancelResponse(responseId: String, requestOptions: RequestOptions?): Response {
        return requester.perform<HttpResponse> {
            it.post {
                url(path = "${ApiPath.Responses}/$responseId/cancel")
                requestOptions(requestOptions)
            }
        }.body()
    }

    /**
     * Lists the input items of the response identified by [responseId].
     */
    override suspend fun listInputItems(
        responseId: String,
        after: String?,
        before: String?,
        include: List<ResponseIncludable>?,
        limit: Int?,
        order: String?,
        requestOptions: RequestOptions?
    ): List<ResponseItem> {
        return requester.perform<ListResponse<ResponseItem>> {
            it.get {
                // The REST endpoint is "input_items", not "items":
                // GET /v1/responses/{response_id}/input_items
                url(path = "${ApiPath.Responses}/$responseId/input_items")
                parameter("after", after)
                parameter("before", before)
                // Repeated parameter per element; see note in getResponse.
                include?.forEach { includable -> parameter("include", includable) }
                parameter("limit", limit)
                parameter("order", order)
                requestOptions(requestOptions)
            }
        }.data
    }

    // TODO: add streaming support (createResponse with stream = true emitting a Flow of events).
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
package com.aallam.openai.client

import com.aallam.openai.api.core.Parameters.Companion.buildJsonObject
import com.aallam.openai.api.model.ModelId
import com.aallam.openai.api.responses.*
import kotlinx.serialization.json.add
import kotlinx.serialization.json.put
import kotlinx.serialization.json.putJsonArray
import kotlinx.serialization.json.putJsonObject
import kotlin.test.Test
import kotlin.test.assertNotNull

/** Integration tests for the Responses API. */
class TestResponses : TestOpenAI() {

    @Test
    fun basicResponse() = test {
        // Simple text-in / text-out round trip.
        val request = responseRequest {
            model = ModelId("gpt-4o")
            input = ResponseInput.from("What is the capital of France?")
        }

        val response = openAI.createResponse(request = request)

        assertNotNull(response)
        assertNotNull(response.output)
    }

    @Test
    fun responseWithTools() = test {
        // JSON-schema parameters for the function tool, built up front for readability.
        val weatherParameters = buildJsonObject {
            put("type", "object")
            putJsonObject("properties") {
                putJsonObject("location") {
                    put("type", "string")
                    put("description", "The city and state, e.g. San Francisco, CA")
                }
                putJsonObject("unit") {
                    put("type", "string")
                    putJsonArray("enum") {
                        add("celsius")
                        add("fahrenheit")
                    }
                }
            }
            putJsonArray("required") {
                add("location")
            }
        }

        val request = responseRequest {
            model = ModelId("gpt-4o")
            input = ResponseInput.from("What's the weather like in Paris?")
            tools {
                add(
                    ResponseTool.Function(
                        name = "get_weather",
                        description = "Get the current weather",
                        parameters = weatherParameters,
                    )
                )
            }
        }

        val response = openAI.createResponse(request = request)

        assertNotNull(response)
        assertNotNull(response.output)
    }

    @Test
    fun responseWithInstructions() = test {
        // Exercises the instructions and maxOutputTokens request fields.
        val request = responseRequest {
            model = ModelId("gpt-4o")
            input = ResponseInput.from("Tell me about artificial intelligence")
            instructions = "Provide a concise answer focusing on recent developments"
            maxOutputTokens = 200
        }

        val response = openAI.createResponse(request = request)

        assertNotNull(response)
        assertNotNull(response.output)
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import kotlin.jvm.JvmInline
public value class Role(public val role: String) {
public companion object {
public val System: Role = Role("system")
public val Developer: Role = Role("developer")
public val User: Role = Role("user")
public val Assistant: Role = Role("assistant")
public val Function: Role = Role("function")
Expand Down
Loading