// src/services/agent.ts
// Copyright (C) 2026 Rob Colbert <rob.colbert@openplatform.us>
// Licensed under the Apache License, Version 2.0
import assert from "node:assert";

import { Socket } from "socket.io-client";

import {
  IAiChatOptions,
  IAiStreamChunk,
  type IContextChatMessage,
} from "@gadget/ai";
import {
  IChatSession,
  IChatTurn,
  IUser,
  ServerToClientEvents,
  ClientToServerEvents,
  ChatSessionMode,
  IProject,
} from "@gadget/api";

import env from "../config/env.ts";
import { GadgetService } from "../lib/service.ts";
import AiService from "./ai.ts";
import WorkspaceService from "./workspace.ts";
import {
  AiToolbox,
  FetchUrlTool,
  FileEditTool,
  FileReadTool,
  FileWriteTool,
  GoogleSearchTool,
  type DroneToolboxEnvironment,
} from "../tools/index.ts";
/**
 * A unit of work handed to AgentService.process(): the chat turn to run
 * plus the prior turns that form its conversation context.
 */
export interface IAgentWorkOrder {
  // Timestamp recorded when the work order was created.
  createdAt: Date;
  // The turn being processed by this work order.
  turn: IChatTurn;
  // Prior turns replayed into the model's context window.
  context: IChatTurn[];
}
/**
 * Internal scratch state assembled while processing a work order: the chat
 * options sent to the AI provider and the rebuilt message context.
 */
interface IAgentWorkflow {
  // Options passed to AiService.chat (system/user prompts, context, tools).
  chatOptions: IAiChatOptions;
  // Conversation history rebuilt by buildSessionContext().
  context: IContextChatMessage[];
}

// Socket.IO connection typed with the shared server/client event maps.
type DroneSocket = Socket<ServerToClientEvents, ClientToServerEvents>;
// Environment handed to the AiToolbox: the runtime mode plus third-party
// service credentials (Google Custom Search) that registered tools may use.
const toolboxEnv: DroneToolboxEnvironment = {
  // Falls back to "develop" when NODE_ENV is unset.
  NODE_ENV: env.NODE_ENV || "develop",
  services: {
    google: {
      cse: {
        apiKey: env.google.cse.apiKey,
        engineId: env.google.cse.engineId,
      },
    },
  },
};
/**
 * AgentService executes agentic chat work orders: it points the shared
 * toolbox at the turn's project workspace, rebuilds the conversation context
 * from persisted turns, invokes the configured AI provider, and relays the
 * model's streamed output ("thinking", "response", "toolCall" events) to the
 * client socket.
 */
class AgentService extends GadgetService {

  // Shared tool registry; start() populates it per ChatSessionMode.
  private toolbox = new AiToolbox(toolboxEnv);

  /** Human-readable service name (GadgetService contract). */
  get name(): string {
    return "AgentService";
  }

  /** Stable service slug used for identification/logging. */
  get slug(): string {
    return "svc:agent";
  }

  /**
   * Registers the agent's tools (Google search, file read/write/edit, URL
   * fetch) with the toolbox for every chat-session mode. Must complete
   * before process() runs so getToolsForMode() returns populated sets.
   */
  async start(): Promise<void> {
    const googleSearchTool = new GoogleSearchTool(this.toolbox);
    this.toolbox.register(googleSearchTool, [
      ChatSessionMode.Plan,
      ChatSessionMode.Build,
      ChatSessionMode.Test,
      ChatSessionMode.Ship,
      ChatSessionMode.Develop,
    ]);
    // Remaining tools are registered for the same full set of modes.
    const modes = [
      ChatSessionMode.Plan,
      ChatSessionMode.Build,
      ChatSessionMode.Test,
      ChatSessionMode.Ship,
      ChatSessionMode.Develop,
    ];
    this.toolbox.register(new FileReadTool(this.toolbox), modes);
    this.toolbox.register(new FileWriteTool(this.toolbox), modes);
    this.toolbox.register(new FileEditTool(this.toolbox), modes);
    this.toolbox.register(new FetchUrlTool(this.toolbox), modes);

    this.log.info("started");
  }

  /** Stops the service; currently only logs the transition. */
  async stop(): Promise<void> {
    this.log.info("stopped");
  }

  /**
   * Executes one work order end to end and reports the result on `socket`.
   *
   * Streamed chunks are forwarded to the client as they arrive; the
   * `streamed*` flags record which content kinds were already delivered so
   * the accumulated final content is not emitted a second time. On failure
   * in either phase, a "workOrderComplete" failure event is emitted before
   * the wrapped error is rethrown; on success a final "workOrderComplete"
   * success event is sent.
   *
   * @param workOrder the turn to process plus its prior-turn context
   * @param socket client connection receiving progress/result events
   * @throws Error wrapping the underlying cause when context building or
   *         the provider chat call fails
   */
  async process(
    workOrder: IAgentWorkOrder,
    socket: DroneSocket,
  ): Promise<void> {
    const { turn } = workOrder;
    const task: IAgentWorkflow = {
      chatOptions: {},
      context: [],
    };
    // Track which content kinds the stream callback already emitted.
    let streamedThinking = false;
    let streamedResponse = false;
    let streamedToolCall = false;

    // Forward each provider stream chunk to the client as it arrives.
    const onStreamChunk = async (chunk: IAiStreamChunk): Promise<void> => {
      // this.log.debug("stream chunk received", { chunk });

      switch (chunk.type) {
        case "thinking":
          streamedThinking = true;
          socket.emit("thinking", chunk.data);
          break;
        case "response":
          streamedResponse = true;
          socket.emit("response", chunk.data);
          break;
        case "toolCall":
          streamedToolCall = true;
          socket.emit(
            "toolCall",
            // NOTE(review): non-null assertions assume the provider always
            // sets toolCallId/toolName on toolCall chunks — confirm upstream.
            chunk.toolCallId!,
            chunk.toolName!,
            chunk.params || "{}",
            chunk.data,
          );
          break;
      }
    };

    // Phase 1: prepare the workspace, context, and chat options.
    try {
      this.updateToolboxWorkspace(turn);
      task.context = this.buildSessionContext(workOrder);
      task.chatOptions = {
        systemPrompt: turn.prompts.system,
        context: task.context,
        userPrompt: turn.prompts.user,
        tools: this.getToolsForMode(turn.mode),
      };
    } catch (cause) {
      // Tell the client the work order failed before rethrowing to the caller.
      socket.emit(
        "workOrderComplete",
        turn._id,
        false,
        `failed to build session context: ${(cause as Error).message}`,
      );
      const error = new Error("failed to build session context", { cause });
      throw error;
    }

    // Phase 2: run the provider chat call and relay any final content.
    try {
      // Map the turn's reasoning-effort setting onto the provider's
      // reasoning parameter ("off" disables reasoning entirely).
      const reasoningEffort = turn.reasoningEffort || "off";
      const reasoning: boolean | "low" | "medium" | "high" =
        reasoningEffort === "off" ? false : reasoningEffort;

      const response = await AiService.chat(
        turn.provider,
        {
          modelId: turn.llm,
          params: {
            reasoning,
            // NOTE(review): sampling params are hard-coded here — confirm
            // they should not come from the turn/session configuration.
            temperature: 0.8,
            topP: 0.9,
            topK: 40,
          },
        },
        task.chatOptions,
        onStreamChunk,
      );

      if (this.isEmptyAgentResponse(response)) {
        throw new Error(
          "AI provider returned an empty response: no thinking, response, tool calls, or tool results.",
        );
      }

      // Check for model loading failure
      if (
        response.doneReason === "load" &&
        !response.response &&
        !response.thinking &&
        (!response.toolCalls || response.toolCalls.length === 0)
      ) {
        throw new Error("Model failed to respond (still loading or error)");
      }

      // Providers return accumulated final content; only emit it here when it
      // was not already delivered through the stream callback.
      if (response.thinking && !streamedThinking) {
        socket.emit("thinking", response.thinking);
      }

      if (response.response && !streamedResponse) {
        socket.emit("response", response.response);
      }

      if (response.toolCalls && response.toolCalls.length > 0 && !streamedToolCall) {
        for (const toolCall of response.toolCalls) {
          socket.emit(
            "toolCall",
            toolCall.callId,
            toolCall.function.name,
            toolCall.function.arguments,
            // Pair each call with its result by callId; empty string if none.
            response.toolCallResults?.find((r) => r.callId === toolCall.callId)
              ?.result || "",
          );
        }
      }
    } catch (cause) {
      socket.emit(
        "workOrderComplete",
        turn._id,
        false,
        `failed to process agentic workflow loop: ${(cause as Error).message}`,
      );
      const error = new Error("failed to process agentic workflow loop", {
        cause,
      });
      throw error;
    }

    // Emit work order complete
    socket.emit("workOrderComplete", turn._id, true);
  }

  /**
   * Rebuilds the provider-facing message history from the work order's prior
   * turns. For each persisted turn this appends: a user message, an
   * assistant message (thinking/responding blocks flattened to one string),
   * and a readable assistant message per recorded tool call.
   *
   * @param workOrder source of the session, user, and prior-turn data
   * @returns ordered context messages for the provider
   * @throws when the session is not populated with its user document
   */
  buildSessionContext(workOrder: IAgentWorkOrder): IContextChatMessage[] {
    const session = workOrder.turn.session as IChatSession;
    if (!session.user) {
      throw new Error("ChatSession must be populated with user data");
    }
    const user: IUser = session.user as IUser;
    const messages: IContextChatMessage[] = [];

    for (const turn of workOrder.context) {
      /*
       * add the User message
       */
      messages.push({
        createdAt: turn.createdAt,
        role: "user",
        content: turn.prompts.user,
        user: {
          _id: user._id,
          // NOTE(review): username is populated from the user's email
          // address — confirm this is intended rather than user.username.
          username: user.email,
          displayName: user.displayName,
        },
      });

      /*
       * Add the assistant's output (if any), to include the thinking
       * (reasoning) output (if any).
       */
      let content = "";

      // Extract thinking and response from blocks
      for (const block of turn.blocks) {
        if (block.mode === "thinking" && typeof block.content === "string") {
          content += `<thinking>${block.content}</thinking>`;
        } else if (
          block.mode === "responding" &&
          typeof block.content === "string"
        ) {
          // Separate thinking text from the response with a newline.
          if (content && content.length) {
            content += "\n";
          }
          content += block.content;
        }
      }

      messages.push({
        createdAt: turn.createdAt,
        role: "assistant",
        content:
          content && content.length
            ? content
            : "(you didn't say anything this turn)",
      });

      /*
       * Persisted turns do not currently store provider-native assistant
       * tool-call messages. Replaying these as role=tool creates invalid
       * OpenAI-compatible history. Keep the information, but make it normal
       * assistant-readable context.
       */
      if (turn.toolCalls?.length > 0) {
        for (const toolCall of turn.toolCalls) {
          // NOTE(review): this `content` shadows the outer accumulator;
          // the outer value is not read past this point, but a distinct
          // name would be clearer.
          const content = this.formatHistoricalToolResult(toolCall);
          messages.push({
            createdAt: turn.createdAt,
            role: "assistant",
            callId: toolCall.callId,
            toolName: toolCall.name,
            content,
          });
        }
      }
    }

    return messages;
  }

  /**
   * To optimize context, reduce clutter, and help the agent focus, full outputs
   * of older file reads and edits are summarized once a newer version is available.
   *
   * Not yet implemented; currently a no-op placeholder.
   */
  pruneSessionContext(messages: IContextChatMessage[]): void {
    // TODO
  }

  // Returns the tools registered for `mode` as an array ([] when the
  // toolbox has no set for that mode).
  // NOTE(review): returns any[]; consider typing via the toolbox's tool type.
  private getToolsForMode(mode: ChatSessionMode): any[] {
    return Array.from(this.toolbox.getModeSet(mode) || []);
  }

  /**
   * Formats a previously-recorded tool call as plain assistant-readable
   * text. Results longer than 8000 characters are truncated and suffixed
   * with an explicit truncation marker so replayed history stays bounded.
   *
   * @param toolCall persisted tool call name, parameters, and response
   * @returns a multi-line summary: header, parameters, separator, result
   */
  private formatHistoricalToolResult(toolCall: {
    name: string;
    parameters?: string;
    response?: string;
  }): string {
    const response = toolCall.response || "";
    const maxLength = 8000;
    const trimmedResponse =
      response.length > maxLength
        ? `${response.slice(0, maxLength)}\n\n[Tool result truncated from ${response.length} characters.]`
        : response;
    return [
      `Historical tool result: ${toolCall.name}`,
      `Parameters: ${toolCall.parameters || "{}"}`,
      "---",
      trimmedResponse,
    ].join("\n");
  }

  /**
   * True when the provider produced nothing usable: no non-blank response
   * or thinking text, and no tool calls or tool-call results.
   */
  private isEmptyAgentResponse(response: {
    response?: string;
    thinking?: string;
    toolCalls?: unknown[];
    toolCallResults?: unknown[];
  }): boolean {
    return (
      !(response.response && response.response.trim()) &&
      !(response.thinking && response.thinking.trim()) &&
      !(response.toolCalls && response.toolCalls.length) &&
      !(response.toolCallResults && response.toolCallResults.length)
    );
  }

  /**
   * Points the shared toolbox at the turn's project workspace directories.
   *
   * @throws when the turn's project is not populated, or when the workspace
   *         service has not been initialized yet
   */
  private updateToolboxWorkspace(turn: IChatTurn): void {
    const project = turn.project as IProject;
    // A string here means the project reference was not populated.
    if (!project || typeof project === "string") {
      throw new Error("ChatTurn must be populated with project data");
    }

    const workspaceDir = WorkspaceService.workspaceDir;
    const cacheDir = WorkspaceService.workspaceCacheDir;
    if (!workspaceDir || !cacheDir) {
      throw new Error("Workspace must be initialized before agent tools are used");
    }

    this.toolbox.updateWorkspace({
      workspaceDir,
      projectDir: WorkspaceService.getProjectDirectory(project.slug),
      cacheDir,
    });
  }
}
export { AgentService };
// Shared singleton instance; consumers import this by default.
export default new AgentService();