Add support for Groq Llama 3.3
elie222 committed Jan 9, 2025
1 parent e5ad1f5 commit 0c83f3a
Showing 7 changed files with 71 additions and 0 deletions.
2 changes: 2 additions & 0 deletions apps/web/app/api/user/settings/route.ts
@@ -30,6 +30,8 @@ async function saveAISettings(options: SaveSettingsBody) {
       return Model.CLAUDE_3_5_SONNET_BEDROCK;
     case Provider.GOOGLE:
       return options.aiModel || Model.GEMINI_1_5_PRO;
+    case Provider.GROQ:
+      return options.aiModel || Model.GROQ_LLAMA_3_3_70B;
     case Provider.OLLAMA:
      return Model.OLLAMA;
     default:
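For context, this switch falls back to a provider's default model when the user hasn't picked one. A minimal standalone sketch of that pattern with the new Groq case included (the helper name and the default branch are assumptions for illustration, not code from the repo):

```ts
import { Model, Provider } from "@/utils/llms/config";

// Hypothetical helper mirroring the switch inside saveAISettings:
// use the user's chosen model, or fall back to the provider default.
function resolveModel(aiProvider: string, aiModel?: string): string {
  switch (aiProvider) {
    case Provider.GOOGLE:
      return aiModel || Model.GEMINI_1_5_PRO;
    case Provider.GROQ:
      return aiModel || Model.GROQ_LLAMA_3_3_70B; // "llama-3.3-70b-versatile"
    case Provider.OLLAMA:
      return Model.OLLAMA;
    default:
      // OpenAI, Anthropic, and Bedrock are handled in the real file.
      throw new Error(`Unhandled provider: ${aiProvider}`);
  }
}
```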
1 change: 1 addition & 0 deletions apps/web/app/api/user/settings/validation.ts
@@ -7,6 +7,7 @@ export const saveSettingsBody = z
       Provider.ANTHROPIC,
       Provider.OPEN_AI,
       Provider.GOOGLE,
+      Provider.GROQ,
       ...(Provider.OLLAMA ? [Provider.OLLAMA] : []),
     ]),
     aiModel: z.string(),
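With Provider.GROQ added to the enum, a Groq selection now passes validation. A minimal sketch of what the schema accepts (the aiApiKey field name and overall body shape are assumptions; only aiProvider and aiModel appear in the diff):

```ts
import { saveSettingsBody } from "@/app/api/user/settings/validation";

// "groq" is the string value behind Provider.GROQ in utils/llms/config.ts.
const result = saveSettingsBody.safeParse({
  aiProvider: "groq",
  aiModel: "llama-3.3-70b-versatile",
  aiApiKey: "gsk_...", // assumed field name for illustration
});

console.log(result.success); // true, assuming no other required fields are missing
```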
1 change: 1 addition & 0 deletions apps/web/package.json
@@ -16,6 +16,7 @@
     "@ai-sdk/amazon-bedrock": "^1.0.6",
     "@ai-sdk/anthropic": "^1.0.6",
     "@ai-sdk/google": "^1.0.12",
+    "@ai-sdk/groq": "^1.0.10",
     "@ai-sdk/openai": "^1.0.11",
     "@asteasolutions/zod-to-openapi": "^7.3.0",
     "@auth/core": "^0.37.4",
9 changes: 9 additions & 0 deletions apps/web/utils/llms/config.ts
@@ -6,6 +6,7 @@ export const Provider = {
   OPEN_AI: "openai",
   ANTHROPIC: "anthropic",
   GOOGLE: "google",
+  GROQ: "groq",
   ...(supportsOllama ? { OLLAMA: "ollama" } : {}),
 };
 
@@ -18,13 +19,15 @@ export const Model = {
   CLAUDE_3_5_SONNET_ANTHROPIC: "claude-3-5-sonnet-20241022",
   GEMINI_1_5_PRO: "gemini-1.5-pro-latest",
   GEMINI_1_5_FLASH: "gemini-1.5-flash-latest",
+  GROQ_LLAMA_3_3_70B: "llama-3.3-70b-versatile",
   ...(supportsOllama ? { OLLAMA: env.NEXT_PUBLIC_OLLAMA_MODEL } : {}),
 };
 
 export const providerOptions: { label: string; value: string }[] = [
   { label: "OpenAI", value: Provider.OPEN_AI },
   { label: "Anthropic", value: Provider.ANTHROPIC },
   { label: "Google", value: Provider.GOOGLE },
+  { label: "Groq", value: Provider.GROQ },
   ...(supportsOllama && Provider.OLLAMA
     ? [{ label: "Ollama", value: Provider.OLLAMA }]
     : []),
@@ -52,6 +55,12 @@ export const modelOptions: Record<string, { label: string; value: string }[]> =
       value: Model.GEMINI_1_5_FLASH,
     },
   ],
+  [Provider.GROQ]: [
+    {
+      label: "Groq Llama 3.3 70B",
+      value: Model.GROQ_LLAMA_3_3_70B,
+    },
+  ],
   ...(Provider.OLLAMA && Model.OLLAMA
     ? {
         [Provider.OLLAMA]: [{ label: "Ollama", value: Model.OLLAMA }],
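These two lists are what a settings UI reads to populate its provider and model dropdowns, so adding Groq here is what surfaces it to users. A minimal sketch of how they could be consumed (the helper is illustrative, not from the repo):

```ts
import { modelOptions, providerOptions, Provider } from "@/utils/llms/config";

// Hypothetical helper: given the selected provider, list the models to offer.
function getModelChoices(provider: string): { label: string; value: string }[] {
  return modelOptions[provider] ?? [];
}

console.log(providerOptions.map((o) => o.label));
// e.g. ["OpenAI", "Anthropic", "Google", "Groq", ...]

console.log(getModelChoices(Provider.GROQ));
// [{ label: "Groq Llama 3.3 70B", value: "llama-3.3-70b-versatile" }]
```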
12 changes: 12 additions & 0 deletions apps/web/utils/llms/index.ts
@@ -11,6 +11,7 @@ import { createOpenAI } from "@ai-sdk/openai";
 import { createAnthropic } from "@ai-sdk/anthropic";
 import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";
 import { createGoogleGenerativeAI } from "@ai-sdk/google";
+import { createGroq } from "@ai-sdk/groq";
 import { createOllama } from "ollama-ai-provider";
 import { env } from "@/env";
 import { saveAiUsage } from "@/utils/usage";
@@ -82,6 +83,17 @@ function getModel({ aiProvider, aiModel, aiApiKey }: UserAIFields) {
     };
   }
 
+  if (provider === Provider.GROQ) {
+    if (!aiApiKey) throw new Error("Groq API key is not set");
+
+    const model = aiModel || Model.GROQ_LLAMA_3_3_70B;
+    return {
+      provider: Provider.GROQ,
+      model,
+      llmModel: createGroq({ apiKey: aiApiKey })(model),
+    };
+  }
+
   if (provider === Provider.OLLAMA && env.NEXT_PUBLIC_OLLAMA_MODEL) {
     return {
       provider: Provider.OLLAMA,
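With this branch, a Groq selection yields a language model backed by @ai-sdk/groq that plugs into the Vercel AI SDK the same way as the other providers. A rough standalone sketch of that usage (the prompt, the GROQ_API_KEY environment variable, and the main() wrapper are placeholders, not code from the repo):

```ts
import { createGroq } from "@ai-sdk/groq";
import { generateText } from "ai";

// Mirrors what the Groq branch of getModel() constructs:
// createGroq({ apiKey }) returns a provider; calling it with a model id
// yields a language model usable anywhere the AI SDK expects one.
async function main() {
  const groq = createGroq({ apiKey: process.env.GROQ_API_KEY ?? "" });
  const llmModel = groq("llama-3.3-70b-versatile");

  const { text, usage } = await generateText({
    model: llmModel,
    prompt: "Summarize this email thread in one sentence.",
  });

  console.log(text);
  console.log(usage); // { promptTokens, completionTokens, totalTokens }
}

main();
```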
5 changes: 5 additions & 0 deletions apps/web/utils/usage.ts
@@ -101,6 +101,11 @@ const costs: Record<
     input: 0.075 / 1_000_000,
     output: 0.3 / 1_000_000,
   },
+  // https://groq.com/pricing
+  "llama-3.3-70b-versatile": {
+    input: 0.59 / 1_000_000,
+    output: 0.79 / 1_000_000,
+  },
 };
 
 // returns cost in cents
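The rates are stored as dollars per token ($0.59 / $0.79 per million input/output tokens for Llama 3.3 70B), and the function below the table converts usage to cents. A minimal sketch of that arithmetic (the helper name and shape are assumptions; only the rates come from the diff):

```ts
const costs: Record<string, { input: number; output: number }> = {
  // https://groq.com/pricing — dollars per token
  "llama-3.3-70b-versatile": {
    input: 0.59 / 1_000_000,
    output: 0.79 / 1_000_000,
  },
};

// Hypothetical cost helper: dollars-per-token rates * tokens, converted to cents.
function calcCostInCents(
  model: string,
  promptTokens: number,
  completionTokens: number,
): number {
  const rate = costs[model];
  if (!rate) return 0;
  const dollars = promptTokens * rate.input + completionTokens * rate.output;
  return dollars * 100;
}

// 10k prompt tokens + 1k completion tokens ≈ 0.669 cents
console.log(calcCostInCents("llama-3.3-70b-versatile", 10_000, 1_000));
```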
41 changes: 41 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.
