Skip to content
Prev Previous commit
Next Next commit
feat(providers): add multi-turn support and token logging for deep research
  • Loading branch information
waleedlatif1 committed Feb 11, 2026
commit 56317f0928ccf6e1daa1d9d4bf9aa458e05d76e1
18 changes: 18 additions & 0 deletions apps/sim/blocks/blocks/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -641,6 +641,16 @@ Example 3 (Array Input):
generationType: 'json-schema',
},
},
{
id: 'previousInteractionId',
title: 'Previous Interaction ID',
type: 'short-input',
placeholder: 'e.g., {{agent_1.interactionId}}',
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
},
},
],
tools: {
access: [
Expand Down Expand Up @@ -804,5 +814,13 @@ Example 3 (Array Input):
description: 'Provider timing information',
},
cost: { type: 'json', description: 'Cost of the API call' },
interactionId: {
type: 'string',
description: 'Interaction ID for multi-turn deep research follow-ups',
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
},
},
},
}
2 changes: 2 additions & 0 deletions apps/sim/executor/handlers/agent/agent-handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -999,6 +999,7 @@ export class AgentBlockHandler implements BlockHandler {
reasoningEffort: inputs.reasoningEffort,
verbosity: inputs.verbosity,
thinkingLevel: inputs.thinkingLevel,
previousInteractionId: inputs.previousInteractionId,
}
}

Expand Down Expand Up @@ -1269,6 +1270,7 @@ export class AgentBlockHandler implements BlockHandler {
content: result.content,
model: result.model,
...this.createResponseMetadata(result),
...(result.interactionId && { interactionId: result.interactionId }),
}
}

Expand Down
2 changes: 2 additions & 0 deletions apps/sim/executor/handlers/agent/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ export interface AgentInputs {
conversationId?: string // Required for all non-none memory types
slidingWindowSize?: string // For message-based sliding window
slidingWindowTokens?: string // For token-based sliding window
// Deep research multi-turn
previousInteractionId?: string // Interactions API previous interaction reference
// LLM parameters
temperature?: string
maxTokens?: string
Expand Down
121 changes: 88 additions & 33 deletions apps/sim/providers/gemini/core.ts
Original file line number Diff line number Diff line change
Expand Up @@ -452,17 +452,34 @@ function extractTextFromInteractionOutputs(outputs: Interactions.Interaction['ou

/**
* Extracts token usage from an Interaction's Usage object.
* The Interactions API provides total_input_tokens, total_output_tokens, and total_tokens.
* The Interactions API provides total_input_tokens, total_output_tokens, total_tokens,
* and total_reasoning_tokens (for thinking models).
*
* Also handles the raw API field name total_thought_tokens which the SDK may
* map to total_reasoning_tokens.
*/
function extractInteractionUsage(usage: Interactions.Usage | undefined): {
inputTokens: number
outputTokens: number
reasoningTokens: number
totalTokens: number
} {
const inputTokens = usage?.total_input_tokens ?? 0
const outputTokens = usage?.total_output_tokens ?? 0
const totalTokens = usage?.total_tokens ?? inputTokens + outputTokens
return { inputTokens, outputTokens, totalTokens }
if (!usage) {
return { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
}

const usageLogger = createLogger('DeepResearchUsage')
usageLogger.info('Raw interaction usage', { usage: JSON.stringify(usage) })

const inputTokens = usage.total_input_tokens ?? 0
const outputTokens = usage.total_output_tokens ?? 0
const reasoningTokens =
usage.total_reasoning_tokens ??
((usage as Record<string, unknown>).total_thought_tokens as number) ??
0
const totalTokens = usage.total_tokens ?? inputTokens + outputTokens

return { inputTokens, outputTokens, reasoningTokens, totalTokens }
}

/**
Expand All @@ -471,9 +488,15 @@ function extractInteractionUsage(usage: Interactions.Usage | undefined): {
function buildDeepResearchResponse(
content: string,
model: string,
usage: { inputTokens: number; outputTokens: number; totalTokens: number },
usage: {
inputTokens: number
outputTokens: number
reasoningTokens: number
totalTokens: number
},
providerStartTime: number,
providerStartTimeISO: string
providerStartTimeISO: string,
interactionId?: string
): ProviderResponse {
const providerEndTime = Date.now()
const duration = providerEndTime - providerStartTime
Expand Down Expand Up @@ -505,6 +528,7 @@ function buildDeepResearchResponse(
],
},
cost: calculateCost(model, usage.inputTokens, usage.outputTokens),
interactionId,
}
}

Expand All @@ -524,12 +548,19 @@ function createDeepResearchStream(
stream: AsyncIterable<Interactions.InteractionSSEEvent>,
onComplete?: (
content: string,
usage: { inputTokens: number; outputTokens: number; totalTokens: number }
usage: {
inputTokens: number
outputTokens: number
reasoningTokens: number
totalTokens: number
},
interactionId?: string
) => void
): ReadableStream<Uint8Array> {
const streamLogger = createLogger('DeepResearchStream')
let fullContent = ''
let completionUsage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 }
let completionUsage = { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
let completedInteractionId: string | undefined

return new ReadableStream({
async start(controller) {
Expand All @@ -546,6 +577,12 @@ function createDeepResearchStream(
if (interaction?.usage) {
completionUsage = extractInteractionUsage(interaction.usage)
}
completedInteractionId = interaction?.id
} else if (event.event_type === 'interaction.start') {
const interaction = (event as Interactions.InteractionEvent).interaction
if (interaction?.id) {
completedInteractionId = interaction.id
}
} else if (event.event_type === 'error') {
const errorEvent = event as { error?: { code?: string; message?: string } }
const message = errorEvent.error?.message ?? 'Unknown deep research stream error'
Expand All @@ -558,7 +595,7 @@ function createDeepResearchStream(
}
}

onComplete?.(fullContent, completionUsage)
onComplete?.(fullContent, completionUsage, completedInteractionId)
controller.close()
} catch (error) {
streamLogger.error('Error reading deep research stream', {
Expand Down Expand Up @@ -595,6 +632,7 @@ export async function executeDeepResearchRequest(
hasSystemPrompt: !!request.systemPrompt,
hasMessages: !!request.messages?.length,
streaming: !!request.stream,
hasPreviousInteractionId: !!request.previousInteractionId,
})

if (request.tools?.length) {
Expand All @@ -620,6 +658,9 @@ export async function executeDeepResearchRequest(
background: true,
store: true,
...(systemInstruction && { system_instruction: systemInstruction }),
...(request.previousInteractionId && {
previous_interaction_id: request.previousInteractionId,
}),
agent_config: {
type: 'deep-research' as const,
thinking_summaries: 'auto' as const,
Expand Down Expand Up @@ -685,31 +726,35 @@ export async function executeDeepResearchRequest(
},
}

streamingResult.stream = createDeepResearchStream(streamResponse, (content, usage) => {
streamingResult.execution.output.content = content
streamingResult.execution.output.tokens = {
input: usage.inputTokens,
output: usage.outputTokens,
total: usage.totalTokens,
}
streamingResult.stream = createDeepResearchStream(
streamResponse,
(content, usage, streamInteractionId) => {
streamingResult.execution.output.content = content
streamingResult.execution.output.tokens = {
input: usage.inputTokens,
output: usage.outputTokens,
total: usage.totalTokens,
}
streamingResult.execution.output.interactionId = streamInteractionId

const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
streamingResult.execution.output.cost = cost

const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
streamingResult.execution.output.cost = cost

const streamEndTime = Date.now()
if (streamingResult.execution.output.providerTiming) {
streamingResult.execution.output.providerTiming.endTime = new Date(
streamEndTime
).toISOString()
streamingResult.execution.output.providerTiming.duration =
streamEndTime - providerStartTime
const segments = streamingResult.execution.output.providerTiming.timeSegments
if (segments?.[0]) {
segments[0].endTime = streamEndTime
segments[0].duration = streamEndTime - providerStartTime
const streamEndTime = Date.now()
if (streamingResult.execution.output.providerTiming) {
streamingResult.execution.output.providerTiming.endTime = new Date(
streamEndTime
).toISOString()
streamingResult.execution.output.providerTiming.duration =
streamEndTime - providerStartTime
const segments = streamingResult.execution.output.providerTiming.timeSegments
if (segments?.[0]) {
segments[0].endTime = streamEndTime
segments[0].duration = streamEndTime - providerStartTime
}
}
}
})
)

return streamingResult
}
Expand Down Expand Up @@ -764,11 +809,21 @@ export async function executeDeepResearchRequest(
logger.info('Deep research completed', {
interactionId,
contentLength: content.length,
inputTokens: usage.inputTokens,
outputTokens: usage.outputTokens,
reasoningTokens: usage.reasoningTokens,
totalTokens: usage.totalTokens,
durationMs: Date.now() - providerStartTime,
})

return buildDeepResearchResponse(content, model, usage, providerStartTime, providerStartTimeISO)
return buildDeepResearchResponse(
content,
model,
usage,
providerStartTime,
providerStartTimeISO,
interactionId
)
} catch (error) {
const providerEndTime = Date.now()
const duration = providerEndTime - providerStartTime
Expand Down
4 changes: 4 additions & 0 deletions apps/sim/providers/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,8 @@ export interface ProviderResponse {
total: number
pricing: ModelPricing
}
/** Interaction ID returned by the Interactions API (used for multi-turn deep research) */
interactionId?: string
}

export type ToolUsageControl = 'auto' | 'force' | 'none'
Expand Down Expand Up @@ -169,6 +171,8 @@ export interface ProviderRequest {
verbosity?: string
thinkingLevel?: string
isDeployedContext?: boolean
/** Previous interaction ID for multi-turn Interactions API requests (deep research follow-ups) */
previousInteractionId?: string
}

export const providers: Record<string, ProviderConfig> = {}