Commit 55f9674

Move structured output from executor to client

1 parent aa2158a · commit 55f9674

37 files changed: +757, -693 lines changed

agents/agents-core/build.gradle.kts

Lines changed: 0 additions & 1 deletion

@@ -17,7 +17,6 @@ kotlin {
         api(project(":utils"))
         api(project(":prompt:prompt-executor:prompt-executor-model"))
         api(project(":prompt:prompt-llm"))
-        api(project(":prompt:prompt-structure"))

         api(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-openai-client"))
         api(project(":prompt:prompt-markdown"))
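Note on this removal: agents-core no longer exposes prompt-structure through its api configuration, so a module that was picking up those types transitively (for example to reference StructuredOutputFixingConfig directly) may now need its own declaration. A minimal sketch, assuming the standard Kotlin Multiplatform source-set layout; this consumer module is hypothetical and not part of the commit:

// Hypothetical consumer build.gradle.kts; only needed if the module relied on
// agents-core re-exporting prompt-structure before this commit.
kotlin {
    sourceSets {
        val commonMain by getting {
            dependencies {
                api(project(":prompt:prompt-structure"))
            }
        }
    }
}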

agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/agent/entity/AIAgentSubgraph.kt

Lines changed: 8 additions & 12 deletions

@@ -14,9 +14,8 @@ import ai.koog.agents.core.tools.ToolDescriptor
 import ai.koog.agents.core.tools.annotations.LLMDescription
 import ai.koog.prompt.llm.LLModel
 import ai.koog.prompt.params.LLMParams
-import ai.koog.prompt.structure.StructureFixingParser
 import ai.koog.prompt.structure.StructuredOutput
-import ai.koog.prompt.structure.StructuredOutputConfig
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
 import ai.koog.prompt.structure.json.JsonStructuredData
 import ai.koog.prompt.structure.json.generator.StandardJsonSchemaGenerator
 import io.github.oshai.kotlinlogging.KotlinLogging

@@ -124,15 +123,13 @@ public open class AIAgentSubgraph<TInput, TOutput>(
         }

         val selectedTools = this.requestLLMStructured(
-            config = StructuredOutputConfig(
-                default = StructuredOutput.Manual(
-                    JsonStructuredData.createJsonStructure<SelectedTools>(
-                        schemaGenerator = StandardJsonSchemaGenerator,
-                        examples = listOf(SelectedTools(listOf()), SelectedTools(tools.map { it.name }.take(3))),
-                    ),
+            structuredOutput = StructuredOutput.Manual(
+                JsonStructuredData.createJsonStructure<SelectedTools>(
+                    schemaGenerator = StandardJsonSchemaGenerator,
+                    examples = listOf(SelectedTools(listOf()), SelectedTools(tools.map { it.name }.take(3))),
                 ),
-                fixingParser = toolSelectionStrategy.fixingParser,
-            )
+            ),
+            fixingConfig = toolSelectionStrategy.fixingConfig,
         ).getOrThrow()

         prompt = initialPrompt

@@ -294,11 +291,10 @@ public sealed interface ToolSelectionStrategy {
     * This ensures that unnecessary tools are excluded, optimizing the toolset for the specific use case.
     *
     * @property subtaskDescription A description of the subtask for which the relevant tools should be selected.
-    * @property fixingParser Optional [StructureFixingParser] to attempt fixes when malformed structured response with tool list is received.
     */
    public data class AutoSelectForTask(
        val subtaskDescription: String,
-        val fixingParser: StructureFixingParser? = null
+        val fixingConfig: StructuredOutputFixingConfig? = null
    ) : ToolSelectionStrategy

    /**
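Callers of the tool-selection strategy migrate by passing a StructuredOutputFixingConfig instead of a StructureFixingParser; the parameter stays optional and defaults to null. A minimal sketch under those assumptions, using an existing fixing configuration (its construction is not shown in this commit) and the package implied by the file path of AIAgentSubgraph.kt:

import ai.koog.agents.core.agent.entity.ToolSelectionStrategy
import ai.koog.prompt.structure.StructuredOutputFixingConfig

// Before: AutoSelectForTask(subtaskDescription = "...", fixingParser = myFixingParser)
// After:  the same strategy now carries an optional StructuredOutputFixingConfig.
fun selectToolsForSubtask(fixingConfig: StructuredOutputFixingConfig? = null): ToolSelectionStrategy =
    ToolSelectionStrategy.AutoSelectForTask(
        subtaskDescription = "Pick only the tools needed to summarize a support ticket",
        fixingConfig = fixingConfig,
    )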

agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/agent/session/AIAgentLLMSession.kt

Lines changed: 41 additions & 53 deletions

@@ -6,17 +6,17 @@ import ai.koog.agents.core.tools.ToolDescriptor
 import ai.koog.agents.core.utils.ActiveProperty
 import ai.koog.prompt.dsl.ModerationResult
 import ai.koog.prompt.dsl.Prompt
-import ai.koog.prompt.executor.model.LLMChoice
 import ai.koog.prompt.executor.model.PromptExecutor
 import ai.koog.prompt.llm.LLModel
+import ai.koog.prompt.message.LLMChoice
 import ai.koog.prompt.message.Message
 import ai.koog.prompt.params.LLMParams
 import ai.koog.prompt.streaming.StreamFrame
-import ai.koog.prompt.structure.StructureFixingParser
+import ai.koog.prompt.structure.StructuredOutput
 import ai.koog.prompt.structure.StructuredOutputConfig
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
 import ai.koog.prompt.structure.StructuredResponse
-import ai.koog.prompt.structure.executeStructured
-import ai.koog.prompt.structure.parseResponseToStructuredResponse
+import ai.koog.prompt.structure.annotations.InternalStructuredOutputApi
 import kotlinx.coroutines.flow.Flow
 import kotlinx.serialization.KSerializer
 import kotlinx.serialization.serializer

@@ -260,10 +260,11 @@ public sealed class AIAgentLLMSession(
     *
     * @param config A configuration defining structures and behavior.
     *
-    * @see [executeStructured]
+    * @see [PromptExecutor.executeStructured]
     */
    public open suspend fun <T> requestLLMStructured(
-        config: StructuredOutputConfig<T>,
+        structuredOutput: StructuredOutput<T>,
+        fixingConfig: StructuredOutputFixingConfig? = null,
    ): Result<StructuredResponse<T>> {
        validateSession()

@@ -272,10 +273,39 @@ public sealed class AIAgentLLMSession(
        return executor.executeStructured(
            prompt = preparedPrompt,
            model = model,
-            config = config,
+            structuredOutput = structuredOutput,
+            fixingConfig = fixingConfig,
        )
    }

+    public open suspend fun <T> requestLLMStructured(
+        config: StructuredOutputConfig<T>
+    ): Result<StructuredResponse<T>> = requestLLMStructured(
+        structuredOutput = config.structuredOutput(model),
+        fixingConfig = config.fixingConfig,
+    )
+
+    /**
+     * Sends a request to LLM and gets a structured response.
+     *
+     * This is a simple version of the full `requestLLMStructured`. Unlike the full version, it does not require specifying
+     * struct definitions and structured output modes manually. It attempts to find the best approach to provide a structured
+     * output based on the defined [model] capabilities.
+     *
+     * @param T The structure to request.
+     * @param examples Optional list of examples in case manual mode will be used. These examples might help the model to
+     * understand the format better.
+     * @param fixingConfig Optional configuration for fixing the structured response.
+     */
+    public suspend inline fun <reified T> requestLLMStructured(
+        examples: List<T> = emptyList(),
+        fixingConfig: StructuredOutputFixingConfig? = null
+    ): Result<StructuredResponse<T>> = requestLLMStructured(
+        serializer = serializer<T>(),
+        examples = examples,
+        fixingConfig = fixingConfig,
+    )
+
    /**
     * Sends a request to LLM and gets a structured response.
     *

@@ -286,14 +316,13 @@ public sealed class AIAgentLLMSession(
     * @param serializer Serializer for the requested structure type.
     * @param examples Optional list of examples in case manual mode will be used. These examples might help the model to
     * understand the format better.
-    * @param fixingParser Optional parser that handles malformed responses by using an auxiliary LLM to
-    * intelligently fix parsing errors. When specified, parsing errors trigger additional
-    * LLM calls with error context to attempt correction of the structure format.
+    * @param fixingConfig Optional configuration for fixing the structured response.
     */
+    @OptIn(InternalStructuredOutputApi::class)
    public open suspend fun <T> requestLLMStructured(
        serializer: KSerializer<T>,
        examples: List<T> = emptyList(),
-        fixingParser: StructureFixingParser? = null
+        fixingConfig: StructuredOutputFixingConfig? = null,
    ): Result<StructuredResponse<T>> {
        validateSession()

@@ -304,51 +333,10 @@ public sealed class AIAgentLLMSession(
            model = model,
            serializer = serializer,
            examples = examples,
-            fixingParser = fixingParser,
+            fixingConfig = fixingConfig,
        )
    }

-    /**
-     * Sends a request to LLM and gets a structured response.
-     *
-     * This is a simple version of the full `requestLLMStructured`. Unlike the full version, it does not require specifying
-     * struct definitions and structured output modes manually. It attempts to find the best approach to provide a structured
-     * output based on the defined [model] capabilities.
-     *
-     * @param T The structure to request.
-     * @param examples Optional list of examples in case manual mode will be used. These examples might help the model to
-     * understand the format better.
-     * @param fixingParser Optional parser that handles malformed responses by using an auxiliary LLM to
-     * intelligently fix parsing errors. When specified, parsing errors trigger additional
-     * LLM calls with error context to attempt correction of the structure format.
-     */
-    public suspend inline fun <reified T> requestLLMStructured(
-        examples: List<T> = emptyList(),
-        fixingParser: StructureFixingParser? = null
-    ): Result<StructuredResponse<T>> = requestLLMStructured(
-        serializer = serializer<T>(),
-        examples = examples,
-        fixingParser = fixingParser,
-    )
-
-    /**
-     * Parses a structured response from the language model using the specified configuration.
-     *
-     * This function takes a response message and a structured output configuration,
-     * parses the response content based on the defined structure, and returns
-     * a structured response containing the parsed data and the original message.
-     *
-     * @param response The response message from the language model that contains the content to be parsed.
-     * The message is expected to match the defined structured output.
-     * @param config The configuration defining the expected structure and additional parsing behavior.
-     * It includes options such as structure definitions and optional parsers for error handling.
-     * @return A structured response containing the parsed data of type `T` along with the original message.
-     */
-    public suspend fun <T> parseResponseToStructuredResponse(
-        response: Message.Assistant,
-        config: StructuredOutputConfig<T>
-    ): StructuredResponse<T> = executor.parseResponseToStructuredResponse(response, config, model)
-
    /**
     * Sends a request to the language model, potentially receiving multiple choices,
     * and returns a list of choices from the model.
agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/agent/session/AIAgentLLMWriteSession.kt

Lines changed: 5 additions & 7 deletions

@@ -15,9 +15,9 @@ import ai.koog.prompt.llm.LLModel
 import ai.koog.prompt.message.Message
 import ai.koog.prompt.params.LLMParams
 import ai.koog.prompt.streaming.StreamFrame
-import ai.koog.prompt.structure.StructureFixingParser
 import ai.koog.prompt.structure.StructuredDataDefinition
 import ai.koog.prompt.structure.StructuredOutputConfig
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
 import ai.koog.prompt.structure.StructuredResponse
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.flatMapMerge

@@ -442,7 +442,7 @@ public class AIAgentLLMWriteSession internal constructor(
     *
     * @param config A configuration defining structures and behavior.
     *
-    * @see [executeStructured]
+    * @see [PromptExecutor.executeStructured]
     */
    override suspend fun <T> requestLLMStructured(
        config: StructuredOutputConfig<T>,

@@ -466,16 +466,14 @@ public class AIAgentLLMWriteSession internal constructor(
     * @param serializer Serializer for the requested structure type.
     * @param examples Optional list of examples in case manual mode will be used. These examples might help the model to
     * understand the format better.
-    * @param fixingParser Optional parser that handles malformed responses by using an auxiliary LLM to
-    * intelligently fix parsing errors. When specified, parsing errors trigger additional
-    * LLM calls with error context to attempt correction of the structure format.
+    * @param fixingConfig Optional configuration for fixing the structured response.
     */
    override suspend fun <T> requestLLMStructured(
        serializer: KSerializer<T>,
        examples: List<T>,
-        fixingParser: StructureFixingParser?
+        fixingConfig: StructuredOutputFixingConfig?
    ): Result<StructuredResponse<T>> {
-        return super.requestLLMStructured(serializer, examples, fixingParser).also {
+        return super.requestLLMStructured(serializer, examples, fixingConfig).also {
            it.onSuccess { response ->
                appendPrompt {
                    message(response.message)
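Behaviourally the write-session override is unchanged apart from the rename: on success it still appends the returned message to the session prompt, so later requests in the same write session see the structured answer in their history. A small sketch of that contract, reusing the hypothetical WeatherForecast type; requestLLM() as the follow-up call is an assumption about the surrounding session API, not part of this diff:

suspend fun forecastThenFollowUp(session: AIAgentLLMWriteSession) {
    // Structured request; on success the response message is appended to the prompt.
    session.requestLLMStructured<WeatherForecast>()

    // A later request in the same write session therefore already has the structured
    // answer in its prompt history.
    session.requestLLM()
}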

agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/dsl/extension/AIAgentFunctionalContextExt.kt

Lines changed: 3 additions & 3 deletions

@@ -11,8 +11,8 @@ import ai.koog.agents.core.tools.ToolDescriptor
 import ai.koog.agents.core.tools.ToolResult
 import ai.koog.prompt.message.Message
 import ai.koog.prompt.streaming.StreamFrame
-import ai.koog.prompt.structure.StructureFixingParser
 import ai.koog.prompt.structure.StructuredDataDefinition
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
 import ai.koog.prompt.structure.StructuredResponse
 import kotlinx.coroutines.flow.Flow
 import kotlinx.serialization.serializer

@@ -150,7 +150,7 @@ public suspend fun AIAgentFunctionalContext.latestTokenUsage(): Int {
 public suspend inline fun <reified T> AIAgentFunctionalContext.requestLLMStructured(
     message: String,
     examples: List<T> = emptyList(),
-    fixingParser: StructureFixingParser? = null
+    fixingConfig: StructuredOutputFixingConfig? = null,
 ): Result<StructuredResponse<T>> {
     return llm.writeSession {
         appendPrompt {

@@ -160,7 +160,7 @@ public suspend inline fun <reified T> AIAgentFunctionalContext.requestLLMStructu
         requestLLMStructured(
             serializer<T>(),
             examples,
-            fixingParser
+            fixingConfig
         )
     }
 }
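For functional agents the extension keeps its shape; the trailing parameter is simply renamed and still defaults to null. A sketch reusing the hypothetical WeatherForecast type; the import path of AIAgentFunctionalContext is an assumption, since only the extension file appears in this diff:

import ai.koog.agents.core.agent.context.AIAgentFunctionalContext // package assumed
import ai.koog.agents.core.dsl.extension.requestLLMStructured

suspend fun AIAgentFunctionalContext.forecastFor(city: String) =
    requestLLMStructured<WeatherForecast>(
        message = "Provide the weather forecast for $city",
        examples = listOf(WeatherForecast(city, 21)),
        // fixingConfig omitted; it defaults to null
    )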

agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/dsl/extension/AIAgentNodes.kt

Lines changed: 5 additions & 4 deletions

@@ -17,9 +17,10 @@ import ai.koog.prompt.llm.LLModel
 import ai.koog.prompt.message.Message
 import ai.koog.prompt.streaming.StreamFrame
 import ai.koog.prompt.streaming.toMessageResponses
-import ai.koog.prompt.structure.StructureFixingParser
 import ai.koog.prompt.structure.StructuredDataDefinition
 import ai.koog.prompt.structure.StructuredOutputConfig
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
+import ai.koog.prompt.structure.StructuredOutputPrompts.getStructuredOutputPrompt
 import ai.koog.prompt.structure.StructuredResponse
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.toList

@@ -229,7 +230,7 @@ public inline fun <reified T> AIAgentSubgraphBuilderBase<*, *>.nodeLLMRequestStr
 public inline fun <reified T> AIAgentSubgraphBuilderBase<*, *>.nodeLLMRequestStructured(
     name: String? = null,
     examples: List<T> = emptyList(),
-    fixingParser: StructureFixingParser? = null
+    fixingConfig: StructuredOutputFixingConfig
 ): AIAgentNodeDelegate<String, Result<StructuredResponse<T>>> =
     node(name) { message ->
         llm.writeSession {

@@ -239,7 +240,7 @@ public inline fun <reified T> AIAgentSubgraphBuilderBase<*, *>.nodeLLMRequestStr

             requestLLMStructured<T>(
                 examples = examples,
-                fixingParser = fixingParser
+                fixingConfig = fixingConfig
             )
         }
     }

@@ -540,7 +541,7 @@ public inline fun <reified TInput, T> AIAgentSubgraphBuilderBase<*, *>.nodeSetSt
 ): AIAgentNodeDelegate<TInput, TInput> =
     node(name) { message ->
         llm.writeSession {
-            prompt = config.updatePrompt(model, prompt)
+            prompt = getStructuredOutputPrompt(prompt, config.structuredOutput(model))
             message
         }
     }
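Two things stand out for graph builders: nodeLLMRequestStructured now takes a StructuredOutputFixingConfig and, in this diff, the parameter is non-nullable with no default (unlike the session-level overloads), and the structured-output prompt is now derived via getStructuredOutputPrompt from the config's structured output. A sketch of registering the structured node; the builder-type imports are assumptions matching the packages used by AIAgentNodes.kt, and WeatherForecast is the hypothetical type from above:

import ai.koog.agents.core.dsl.builder.AIAgentNodeDelegate // package assumed
import ai.koog.agents.core.dsl.builder.AIAgentSubgraphBuilderBase // package assumed
import ai.koog.agents.core.dsl.extension.nodeLLMRequestStructured
import ai.koog.prompt.structure.StructuredOutputFixingConfig
import ai.koog.prompt.structure.StructuredResponse

// Sketch only: a helper that registers a structured-output node on any subgraph builder.
fun AIAgentSubgraphBuilderBase<*, *>.forecastNode(
    fixingConfig: StructuredOutputFixingConfig, // required here; no default in this diff
): AIAgentNodeDelegate<String, Result<StructuredResponse<WeatherForecast>>> =
    nodeLLMRequestStructured<WeatherForecast>(
        name = "requestForecast",
        examples = listOf(WeatherForecast("Berlin", 21)),
        fixingConfig = fixingConfig,
    )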

agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/feature/PromptExecutorProxy.kt

Lines changed: 26 additions & 1 deletion

@@ -4,17 +4,21 @@ import ai.koog.agents.core.feature.pipeline.AIAgentPipeline
 import ai.koog.agents.core.tools.ToolDescriptor
 import ai.koog.prompt.dsl.ModerationResult
 import ai.koog.prompt.dsl.Prompt
-import ai.koog.prompt.executor.model.LLMChoice
 import ai.koog.prompt.executor.model.PromptExecutor
 import ai.koog.prompt.llm.LLModel
+import ai.koog.prompt.message.LLMChoice
 import ai.koog.prompt.message.Message
 import ai.koog.prompt.streaming.StreamFrame
+import ai.koog.prompt.structure.StructuredOutput
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
+import ai.koog.prompt.structure.StructuredResponse
 import io.github.oshai.kotlinlogging.KotlinLogging
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.catch
 import kotlinx.coroutines.flow.onCompletion
 import kotlinx.coroutines.flow.onEach
 import kotlinx.coroutines.flow.onStart
+import kotlinx.serialization.KSerializer
 import kotlin.uuid.ExperimentalUuidApi
 import kotlin.uuid.Uuid

@@ -115,6 +119,27 @@ public class PromptExecutorProxy(
         return responses
     }

+    override suspend fun <T> executeStructured(
+        prompt: Prompt,
+        model: LLModel,
+        structuredOutput: StructuredOutput<T>,
+        fixingConfig: StructuredOutputFixingConfig?
+    ): Result<StructuredResponse<T>> {
+        logger.debug { "Executing structured LLM request (prompt: $prompt)" }
+        return executor.executeStructured(prompt, model, structuredOutput, fixingConfig)
+    }
+
+    override suspend fun <T> executeStructured(
+        prompt: Prompt,
+        model: LLModel,
+        serializer: KSerializer<T>,
+        examples: List<T>,
+        fixingConfig: StructuredOutputFixingConfig?
+    ): Result<StructuredResponse<T>> {
+        logger.debug { "Executing structured LLM request (prompt: $prompt)" }
+        return executor.executeStructured(prompt, model, serializer, examples, fixingConfig)
+    }
+
     override suspend fun moderate(
         prompt: Prompt,
         model: LLModel
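This is the heart of the commit: executeStructured is now part of the PromptExecutor surface, with an overload taking an explicit StructuredOutput and one taking just a serializer plus examples, so PromptExecutorProxy only logs and delegates. A sketch of calling it directly on any executor; the wrapper function and its parameters are illustrative, not part of the commit, and how the prompt and model are built is not shown here:

import ai.koog.prompt.dsl.Prompt
import ai.koog.prompt.executor.model.PromptExecutor
import ai.koog.prompt.llm.LLModel
import ai.koog.prompt.structure.StructuredOutputFixingConfig
import ai.koog.prompt.structure.StructuredResponse
import kotlinx.serialization.KSerializer

// Serializer-based overload: the executor picks the structured-output approach itself.
suspend fun <T> fetchStructured(
    executor: PromptExecutor, // a PromptExecutorProxy behaves the same; it just delegates
    prompt: Prompt,
    model: LLModel,
    serializer: KSerializer<T>,
    fixingConfig: StructuredOutputFixingConfig? = null,
): Result<StructuredResponse<T>> =
    executor.executeStructured(
        prompt = prompt,
        model = model,
        serializer = serializer,
        examples = emptyList(),
        fixingConfig = fixingConfig,
    )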

agents/agents-core/src/commonTest/kotlin/ai/koog/agents/core/agent/session/AIAgentLLMSessionStructuredOutputTest.kt

Lines changed: 4 additions & 4 deletions

@@ -10,7 +10,7 @@ import ai.koog.prompt.executor.clients.openai.OpenAIModels
 import ai.koog.prompt.message.Message
 import ai.koog.prompt.message.ResponseMetaInfo
 import ai.koog.prompt.structure.StructuredOutput
-import ai.koog.prompt.structure.StructuredOutputConfig
+import ai.koog.prompt.structure.StructuredOutputFixingConfig
 import ai.koog.prompt.structure.json.JsonStructuredData
 import kotlinx.coroutines.test.runTest
 import kotlinx.serialization.Serializable

@@ -33,7 +33,7 @@ class AIAgentLLMSessionStructuredOutputTest : AgentTestBase() {
     @Test
     fun testParseResponseToStructuredResponse() = runTest {
         val structure = JsonStructuredData.createJsonStructure<TestStructure>()
-        val config = StructuredOutputConfig(
+        val config = StructuredOutputFixingConfig(
             default = StructuredOutput.Manual(structure)
         )

@@ -86,7 +86,7 @@ class AIAgentLLMSessionStructuredOutputTest : AgentTestBase() {
     @Test
     fun testParseResponseToStructuredResponseWithNullableField() = runTest {
         val structure = JsonStructuredData.createJsonStructure<TestStructure>()
-        val config = StructuredOutputConfig(
+        val config = StructuredOutputFixingConfig(
             default = StructuredOutput.Manual(structure)
         )

@@ -156,7 +156,7 @@ class AIAgentLLMSessionStructuredOutputTest : AgentTestBase() {
         )

         val structure = JsonStructuredData.createJsonStructure<ComplexStructure>()
-        val config = StructuredOutputConfig(
+        val config = StructuredOutputFixingConfig(
             default = StructuredOutput.Manual(structure)
         )
