/**
 * Codebuff agent definition: "Context Pruner".
 *
 * Spawned between steps to shrink an oversized message history. Pruning runs
 * in escalating passes and stops as soon as the history fits the token budget:
 *   0. Drop the most recent message and re-check.
 *   1. Truncate stdout of all but the 5 newest `run_terminal_command` results.
 *   2. Replace any tool output larger than 1000 JSON chars with a stub.
 *   3. Delete oldest messages outright, collapsing each run of deletions into
 *      a single placeholder message.
 */
const agentDefinition = {
  id: "context-pruner",
  displayName: "Context Pruner",
  publisher: "codebuff",
  version: "0.0.25",
  model: "openai/gpt-5-mini",
  toolNames: ["set_messages"],
  spawnableAgents: [],
  inputSchema: {
    params: {
      type: "object",
      required: [],
      properties: {
        // Token budget for the pruned history; defaults to 200000 in handleSteps.
        maxContextLength: {
          type: "number",
        },
      },
    },
  },
  includeMessageHistory: true,
  outputMode: "last_message",
  spawnerPrompt: `Spawn this agent between steps to prune context, starting with old tool results and then old messages.`,
  systemPrompt: ``,
  instructionsPrompt: ``,
  stepPrompt: ``,

  /**
   * Generator invoked by the agent runtime. Yields exactly one `set_messages`
   * tool call containing the pruned history, then returns.
   *
   * @param {{ agentState: { messageHistory: Array<object> },
   *           params?: { maxContextLength?: number } }} ctx
   */
  handleSteps: function* ({ agentState, params }) {
    // Crude token estimate: ~3 JSON characters per token.
    const countTokensJson = (obj) => Math.ceil(JSON.stringify(obj).length / 3);
    const maxMessageTokens = params?.maxContextLength ?? 200000;

    // Pass 0: work on a copy and drop the most recent message
    // (presumably the spawn prompt for this pruner itself — TODO confirm).
    const currentMessages = [...agentState.messageHistory];
    currentMessages.pop();
    if (countTokensJson(currentMessages) < maxMessageTokens) {
      yield {
        toolName: "set_messages",
        input: { messages: currentMessages },
        includeToolCall: false,
      };
      return;
    }

    // Pass 1: walk newest-to-oldest, keeping the 5 newest terminal-command
    // results intact; older ones keep only the command string.
    let numKeptTerminalCommands = 0;
    const afterTerminalPass = [];
    for (let i = currentMessages.length - 1; i >= 0; i--) {
      const message = currentMessages[i];
      const isTerminalResult =
        message.role === "tool" &&
        message.content.toolName === "run_terminal_command";
      if (!isTerminalResult || numKeptTerminalCommands < 5) {
        if (isTerminalResult) {
          numKeptTerminalCommands++;
        }
        afterTerminalPass.unshift(message);
        continue;
      }
      afterTerminalPass.unshift({
        ...message,
        content: {
          ...message.content,
          output: [
            {
              type: "json",
              value: {
                // `?.` on `output` guards a missing array; the original only
                // guarded the element and would throw on undefined `output`.
                command: message.content.output?.[0]?.value?.command || "",
                stdoutOmittedForLength: true,
              },
            },
          ],
        },
      });
    }
    if (countTokensJson(afterTerminalPass) < maxMessageTokens) {
      yield {
        toolName: "set_messages",
        input: {
          messages: afterTerminalPass,
        },
        includeToolCall: false,
      };
      return;
    }

    // Pass 2: stub out any tool output whose JSON exceeds 1000 characters.
    const afterToolResultsPass = afterTerminalPass.map((message) => {
      if (message.role !== "tool") {
        return message;
      }
      const outputSize = JSON.stringify(message.content.output).length;
      if (outputSize <= 1000) {
        return message;
      }
      return {
        ...message,
        content: {
          ...message.content,
          output: [
            {
              type: "json",
              value: {
                message: "[LARGE_TOOL_RESULT_OMITTED]",
                originalSize: outputSize,
              },
            },
          ],
        },
      };
    });
    if (countTokensJson(afterToolResultsPass) < maxMessageTokens) {
      yield {
        toolName: "set_messages",
        input: {
          messages: afterToolResultsPass,
        },
        includeToolCall: false,
      };
      return;
    }

    // Pass 3: hard-delete oldest messages until enough tokens are freed,
    // collapsing each run of consecutive deletions into one placeholder.
    const shortenedMessageTokenFactor = 0.5;
    const replacementMessage = {
      role: "user",
      content: "<system>Previous message(s) omitted due to length</system>",
    };

    // Map each keepLast tag to the index of its LAST occurrence; only that
    // occurrence is protected from deletion.
    const keepLastTags = new Map();
    for (const [i, message] of afterToolResultsPass.entries()) {
      if (!message.keepLastTags) {
        continue;
      }
      for (const tag of message.keepLastTags) {
        keepLastTags.set(tag, i);
      }
    }
    // Set gives O(1) membership checks; the original ran Array.includes
    // inside the loop (O(n^2) overall).
    const keepLastIndices = new Set(keepLastTags.values());

    const requiredTokens = countTokensJson(
      afterToolResultsPass.filter((m) => m.keepDuringTruncation),
    );
    // NOTE(review): this budget is derived from maxMessageTokens, not from the
    // actual current size, so a very large history may still exceed the budget
    // after this pass — confirm intended.
    const tokensToRemove =
      (maxMessageTokens - requiredTokens) * (1 - shortenedMessageTokenFactor);
    // Loop-invariant: cost of one placeholder, hoisted out of the loop.
    const placeholderTokens = countTokensJson(replacementMessage);
    const placeholder = "deleted";
    const filteredMessages = [];
    let removedTokens = 0;
    for (const [i, message] of afterToolResultsPass.entries()) {
      if (
        removedTokens >= tokensToRemove ||
        message.keepDuringTruncation ||
        keepLastIndices.has(i)
      ) {
        filteredMessages.push(message);
        continue;
      }
      removedTokens += countTokensJson(message);
      // Only the first deletion in a run inserts a placeholder (and pays its
      // token cost against the amount removed).
      if (filteredMessages.at(-1) !== placeholder) {
        filteredMessages.push(placeholder);
        removedTokens -= placeholderTokens;
      }
    }
    yield {
      toolName: "set_messages",
      input: {
        messages: filteredMessages.map((m) =>
          m === placeholder ? replacementMessage : m,
        ),
      },
      includeToolCall: false,
    };
  },
  mcpServers: {},
  inheritParentSystemPrompt: false,
};