diff --git a/gadget-drone/src/services/agent.ts b/gadget-drone/src/services/agent.ts index b712187..a650dab 100644 --- a/gadget-drone/src/services/agent.ts +++ b/gadget-drone/src/services/agent.ts @@ -4,7 +4,11 @@ import { Types } from "@gadget/api"; import { Socket } from "socket.io-client"; -import { IAiChatOptions, type IContextChatMessage } from "@gadget/ai"; +import { + IAiChatOptions, + IAiStreamChunk, + type IContextChatMessage, +} from "@gadget/ai"; import { IChatSession, IChatTurn, @@ -32,6 +36,11 @@ export interface IAgentWorkOrder { context: IChatTurn[]; } +interface IAgentWorkflow { + chatOptions: IAiChatOptions; + context: IContextChatMessage[]; +} + type DroneSocket = Socket; class AgentService extends GadgetService { @@ -55,67 +64,100 @@ class AgentService extends GadgetService { socket: DroneSocket, ): Promise<void> { const { turn } = workOrder; + const task: IAgentWorkflow = { + chatOptions: {}, + context: [], + }; async function aiCallTool(name: string, args: string) { return "[all tool calls are stubbed out]"; } - const context = this.buildSessionContext(workOrder); - const chatOptions: IAiChatOptions = { - systemPrompt: turn.prompts.system, - context, - userPrompt: turn.prompts.user, + const onStreamChunk = async (chunk: IAiStreamChunk): Promise<void> => { + this.log.debug("stream chunk received", { chunk }); }; - let keepProcessing = true; - do { - const response = await AiService.chat( - turn.provider, - { - modelId: turn.llm, - params: { - reasoning: false, - temperature: 0.8, - topP: 0.9, - topK: 40, - }, - }, - chatOptions, + try { + task.context = this.buildSessionContext(workOrder); + task.chatOptions = { + systemPrompt: turn.prompts.system, + context: task.context, + userPrompt: turn.prompts.user, + }; + } catch (cause) { + socket.emit( + "workOrderComplete", + turn._id, + false, + `failed to build session context: ${(cause as Error).message}`, ); + const error = new Error("failed to build session context", { cause }); + throw error; + } - // Emit 
thinking content if present - if (response.thinking) { - socket.emit("thinking", response.thinking); - } - - // Emit response content if present - if (response.response) { - socket.emit("response", response.response); - } - - keepProcessing = (response.toolCalls?.length ?? 0) > 0; - for (const toolCall of response.toolCalls ?? []) { - const result = await aiCallTool( - toolCall.function.name, - toolCall.function.arguments, + try { + let keepProcessing = true; + do { + const response = await AiService.chat( + turn.provider, + { + modelId: turn.llm, + params: { + reasoning: false, + temperature: 0.8, + topP: 0.9, + topK: 40, + }, + }, + task.chatOptions, + onStreamChunk, ); - context.push({ - createdAt: new Date(), - role: "tool", - callId: toolCall.callId, - content: result, - }); - // Emit tool call event - socket.emit( - "toolCall", - toolCall.callId, - toolCall.function.name, - toolCall.function.arguments, - result, - ); - } - } while (keepProcessing); + // Emit thinking content if present + if (response.thinking) { + socket.emit("thinking", response.thinking); + } + + // Emit response content if present + if (response.response) { + socket.emit("response", response.response); + } + + keepProcessing = (response.toolCalls?.length ?? 0) > 0; + for (const toolCall of response.toolCalls ?? 
[]) { + const result = await aiCallTool( + toolCall.function.name, + toolCall.function.arguments, + ); + task.context.push({ + createdAt: new Date(), + role: "tool", + callId: toolCall.callId, + content: result, + }); + + // Emit tool call event + socket.emit( + "toolCall", + toolCall.callId, + toolCall.function.name, + toolCall.function.arguments, + result, + ); + } + } while (keepProcessing); + } catch (cause) { + socket.emit( + "workOrderComplete", + turn._id, + false, + `failed to process agentic workflow loop: ${(cause as Error).message}`, + ); + const error = new Error("failed to process agentic workflow loop", { + cause, + }); + throw error; + } // Emit work order complete socket.emit("workOrderComplete", turn._id, true);