Skip to content

Commit

Permalink
Add anthropic as a provider (#15)
Browse files Browse the repository at this point in the history
* Add anthropic as a provider

This depends on inngest/inngest-js#764, which
exposes model options in Inngest's model provider.  This makes types
happy.

* Add lint CI

* Fix lint CI ignored files

* Fix linting, switch compile error

* Update package.json

* Update lockfile

---------

Co-authored-by: Jack Williams <[email protected]>
Co-authored-by: Jack Williams <[email protected]>
  • Loading branch information
3 people authored Dec 6, 2024
1 parent f9dd9f6 commit 485afd1
Show file tree
Hide file tree
Showing 10 changed files with 196 additions and 35 deletions.
12 changes: 5 additions & 7 deletions demo/inngest.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import {
agenticOpenai,
anthropic,
createAgent,
createNetwork,
createTypedTool,
defaultRoutingAgent,
} from "@inngest/agent-kit";
} from "../src/index";
import { EventSchemas, Inngest } from "inngest";
import { z } from "zod";

Expand All @@ -24,7 +24,7 @@ export const fn = inngest.createFunction(
{ id: "agent" },
{ event: "agent/run" },
async ({ event, step }) => {
const model = agenticOpenai({ model: "gpt-4", step });
const model = anthropic({ model: "claude-3-5-haiku-latest", max_tokens: 1024, step });

// 1. Single agents
//
Expand All @@ -33,18 +33,16 @@ export const fn = inngest.createFunction(
model,
});

// 2. Networks of agents
const cheapModel = agenticOpenai({ model: "gpt-3.5-turbo", step });
// 2. A network of agents that works together

const network = createNetwork({
agents: [
codeWritingAgent.withModel(model),
executingAgent.withModel(cheapModel),
executingAgent.withModel(model),
],
defaultModel: model,
maxIter: 4,
});
// code -> executing -> code

// This uses the default agentic router to determine which agent to handle first. You can
// optionally specify the agent that should execute first, and provide your own logic for
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
}
},
"dependencies": {
"inngest": "^3.26.3",
"inngest": "3.27.1",
"openai-zod-to-json-schema": "^1.0.3",
"zod": "^3.23.8"
},
Expand Down
32 changes: 12 additions & 20 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

115 changes: 115 additions & 0 deletions src/adapters/anthropic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
/**
* Adapters for Anthropic I/O to transform to/from internal network messages.
*
* @module
*/

import {
type AnthropicAiAdapter,
type AiAdapter,
type Anthropic,
} from "inngest";
import { zodToJsonSchema } from "openai-zod-to-json-schema";
import { type AgenticModel } from "../model";
import { type InternalNetworkMessage } from "../state";

/**
 * Parse a request from internal network messages to an Anthropic input.
 */
export const requestParser: AgenticModel.RequestParser<Anthropic.AiModel> = (
  model,
  messages,
  tools,
) => {
  // Anthropic takes the system prompt as a top-level field rather than as a
  // message in the conversation, so split it out before building the body.
  const systemMessage = messages.find((m) => m.role === "system");
  const system =
    typeof systemMessage?.content === "string" ? systemMessage.content : "";

  // Everything that is not the system prompt becomes a conversational turn.
  const conversation = messages
    .filter((m) => m.role !== "system")
    .map((m) => ({
      role: m.role,
      content: m.content,
    })) as AiAdapter.Input<Anthropic.AiModel>["messages"];

  const request: AiAdapter.Input<Anthropic.AiModel> = {
    system,
    model: model.options.model,
    max_tokens: model.options.max_tokens,
    messages: conversation,
  };

  // Tools are only attached when present; Zod schemas are converted to the
  // JSON-schema shape Anthropic expects for `input_schema`.
  if (tools?.length) {
    request.tools = tools.map((t) => ({
      name: t.name,
      description: t.description,
      input_schema: zodToJsonSchema(
        t.parameters,
      ) as AnthropicAiAdapter.Tool.InputSchema,
    }));
  }

  return request;
};

/**
 * Parse a response from Anthropic output to internal network messages.
 */
export const responseParser: AgenticModel.ResponseParser<Anthropic.AiModel> = (
  input,
) => {
  const messages: InternalNetworkMessage[] = [];

  for (const item of input?.content ?? []) {
    // Skip content entries with no discriminating type.
    if (!item.type) {
      continue;
    }

    switch (item.type) {
      case "text":
        messages.push({
          role: input.role,
          content: item.text,
        });
        break;

      case "tool_use": {
        // Tool input may arrive as a JSON string or as an already-parsed
        // value; try to parse strings, falling back to the raw value.
        let args;
        try {
          // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
          args =
            typeof item.input === "string"
              ? JSON.parse(item.input)
              : item.input;
        } catch {
          args = item.input;
        }

        messages.push({
          role: input.role,
          content: "",
          tools: [
            {
              type: "tool",
              id: item.id,
              name: item.name,
              // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
              input: args,
            },
          ],
        });
        break;
      }
    }
  }

  return messages;
};
1 change: 1 addition & 0 deletions src/adapters/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import { type InternalNetworkMessage, type ToolMessage } from "../state";
* Parse a request from internal network messages to an OpenAI input.
*/
export const requestParser: AgenticModel.RequestParser<OpenAi.AiModel> = (
model,
messages,
tools,
) => {
Expand Down
1 change: 1 addition & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ export * from "./util";
// Models
export * from "./models/gemini";
export * from "./models/openai";
export * from "./models/anthropic";
3 changes: 2 additions & 1 deletion src/model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ export class AgenticModel<TAiAdapter extends AiAdapter> {
): Promise<AgenticModel.InferenceResponse> {
const result = (await this.step.ai.infer(stepID, {
model: this.#model,
body: this.requestParser(input, tools),
body: this.requestParser(this.#model, input, tools),
})) as AiAdapter.Input<TAiAdapter>;

return { output: this.responseParser(result), raw: result };
Expand Down Expand Up @@ -56,6 +56,7 @@ export namespace AgenticModel {
}

export type RequestParser<TAiAdapter extends AiAdapter> = (
model: TAiAdapter,
state: InternalNetworkMessage[],
tools: Tool.Any[],
) => AiAdapter.Input<TAiAdapter>;
Expand Down
43 changes: 43 additions & 0 deletions src/models/anthropic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import {
anthropic as ianthropic,
type GetStepTools,
type Inngest,
type Anthropic,
} from "inngest";
import { requestParser, responseParser } from "../adapters/anthropic";
import { AgenticModel } from "../model";

/**
 * Option types for the `anthropic` agentic model factory.
 */
export namespace AnthropicModel {
  /**
   * Options accepted when creating an agentic Anthropic model.
   *
   * Extends Anthropic's own model options (minus `model`, which is widened
   * below to also accept a pre-built adapter).
   */
  export interface Options<TAiAdapter extends Anthropic.AiModel>
    extends Omit<Anthropic.AiModelOptions, "model"> {
    /**
     * The Anthropic model to use: either a model name string (e.g.
     * "claude-3-5-haiku-latest") or an already-constructed adapter.
     */
    model: Anthropic.AiModelOptions["model"] | TAiAdapter;

    /**
     * The step tools to use internally within this model.
     */
    step: GetStepTools<Inngest.Any>;
  }
}

/**
 * Create an agentic Anthropic model using the Anthropic chat format.
 */
export const anthropic = <TAiAdapter extends Anthropic.AiModel>({
  step,
  ...modelOptions
}: AnthropicModel.Options<TAiAdapter>) => {
  // `model` may be a plain model-name string or a pre-built adapter; a
  // string is wrapped into an adapter via Inngest's anthropic() helper.
  const nameOrAdapter = modelOptions.model;
  const adapter =
    typeof nameOrAdapter === "string"
      ? ianthropic({ ...modelOptions, model: nameOrAdapter })
      : nameOrAdapter;

  return new AgenticModel({
    model: adapter,
    step,
    requestParser,
    responseParser,
  });
};
Loading

0 comments on commit 485afd1

Please sign in to comment.