Commit

Merge branch 'tauri-main' into runtime-improvements
stephlow committed Oct 8, 2024
2 parents 4c978d1 + 4058b74 commit f835a13
Showing 81 changed files with 65,160 additions and 327 deletions.
3 changes: 3 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion DEVELOPMENT.md
@@ -45,7 +45,7 @@ Follow the instructions in the [`client-library-otel` README](./packages/client-

## Developing

-This project uses typescript, biome and pnpm workspaces. The frontend package also uses eslint for linting purposes, all other packages use biome for linting (formatting is always done with biome).
+This project uses typescript, biome and pnpm workspaces. Linting and formatting is handled with [biome](https://biomejs.dev/).

In the project root you can format all typescript codebases with `pnpm run format`.

6 changes: 6 additions & 0 deletions api/package.json
@@ -22,6 +22,8 @@
"db:drop": "drizzle-kit drop",
"db:seed": "tsx scripts/seed.ts",
"db:studio": "drizzle-kit studio",
"expand-function": "tsx src/lib/expand-function/tests/expand-function-smoke-test.ts",
"expand-function:debug": "node --inspect-brk -r tsx/cjs src/lib/expand-function/tests/expand-function-smoke-test.ts",
"build": "pnpm run db:generate && tsc",
"format": "biome check . --write",
"lint": "biome lint .",
@@ -56,6 +58,10 @@
"minimatch": "^10.0.1",
"openai": "^4.47.1",
"source-map": "^0.7.4",
"typescript": "^5.5.4",
"typescript-language-server": "^4.3.3",
"vscode-jsonrpc": "^8.2.1",
"vscode-uri": "^3.0.8",
"ws": "^8.17.1",
"zod": "^3.23.8"
},
2 changes: 1 addition & 1 deletion api/src/app.ts
@@ -10,7 +10,7 @@ import logger from "./logger.js";
import { cors } from "hono/cors";
import type * as webhoncType from "./lib/webhonc/index.js";
import appRoutes from "./routes/app-routes.js";
-import inference from "./routes/inference.js";
+import inference from "./routes/inference/index.js";
import settings from "./routes/settings.js";
import source from "./routes/source.js";
import traces from "./routes/traces.js";
6 changes: 6 additions & 0 deletions api/src/constants.ts
@@ -1 +1,7 @@
+import path from "node:path";
+
export const DEFAULT_DATABASE_URL = "file:fpx.db";
+
+export const USER_PROJECT_ROOT_DIR = path.resolve(
+  process.env.FPX_WATCH_DIR ?? process.cwd(),
+);
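The new `USER_PROJECT_ROOT_DIR` constant centralizes the `FPX_WATCH_DIR ?? process.cwd()` lookup that `index.node.ts` previously did inline, so the route-probe watcher and the new TypeScript language server resolve the same project root. A quick sketch of the resolution semantics (the paths shown are illustrative, not from the repository):

```typescript
import path from "node:path";

// Illustrative only: in the API this resolution happens once, at module load.
process.env.FPX_WATCH_DIR = "../my-hono-app";
console.log(path.resolve(process.env.FPX_WATCH_DIR ?? process.cwd()));
// => e.g. "/home/user/my-hono-app" (relative values resolve against cwd)

delete process.env.FPX_WATCH_DIR;
console.log(path.resolve(process.env.FPX_WATCH_DIR ?? process.cwd()));
// => process.cwd() itself, e.g. "/home/user/fpx/api"
```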
19 changes: 16 additions & 3 deletions api/src/index.node.ts
@@ -7,8 +7,9 @@ import { drizzle } from "drizzle-orm/libsql";
import figlet from "figlet";
import type { WebSocket } from "ws";
import { createApp } from "./app.js";
-import { DEFAULT_DATABASE_URL } from "./constants.js";
+import { DEFAULT_DATABASE_URL, USER_PROJECT_ROOT_DIR } from "./constants.js";
import * as schema from "./db/schema.js";
+import { getTSServer } from "./lib/expand-function/tsserver/index.js";
import { setupRealtimeService } from "./lib/realtime/index.js";
import { getSetting } from "./lib/settings/index.js";
import { resolveWebhoncUrl } from "./lib/utils.js";
@@ -76,8 +77,7 @@ server.on("error", (err) => {
//
// Additionally, this will watch for changes to files in the project directory,
// - If a file changes, send a new probe to the service
-const watchDir = process.env.FPX_WATCH_DIR ?? process.cwd();
-startRouteProbeWatcher(watchDir);
+startRouteProbeWatcher(USER_PROJECT_ROOT_DIR);

// Set up websocket server
setupRealtimeService({ server, path: "/ws", wsConnections });
@@ -92,3 +92,16 @@ if (proxyRequestsEnabled ?? false) {
logger.debug("Proxy requests feature enabled.");
await webhonc.start();
}
+
+// check settings if ai is enabled, and proactively start the typescript language server
+const aiEnabled = await getSetting(db, "aiEnabled");
+if (aiEnabled ?? false) {
+  logger.debug(
+    "AI Request Generation enabled. Starting typescript language server",
+  );
+  try {
+    await getTSServer(USER_PROJECT_ROOT_DIR);
+  } catch (error) {
+    logger.error("Error starting TSServer:", error);
+  }
+}
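`getTSServer` itself is not shown in this diff, but the dependencies added to `api/package.json` (`typescript-language-server`, `vscode-jsonrpc`, `vscode-uri`) suggest its shape: spawn the language server as a child process and drive it over LSP. A hedged sketch under that assumption; the real signature and handshake details in `api/src/lib/expand-function/tsserver` may differ:

```typescript
import { spawn } from "node:child_process";
import {
  createMessageConnection,
  StreamMessageReader,
  StreamMessageWriter,
} from "vscode-jsonrpc/node";
import { URI } from "vscode-uri";

// Hypothetical sketch, not the actual implementation.
export async function getTSServer(projectRoot: string) {
  // typescript-language-server speaks LSP over stdio.
  const child = spawn("typescript-language-server", ["--stdio"], {
    cwd: projectRoot,
  });

  const connection = createMessageConnection(
    new StreamMessageReader(child.stdout!),
    new StreamMessageWriter(child.stdin!),
  );
  connection.listen();

  // Standard LSP handshake, rooted at the user's project directory.
  await connection.sendRequest("initialize", {
    processId: process.pid,
    rootUri: URI.file(projectRoot).toString(),
    capabilities: {},
  });
  connection.sendNotification("initialized", {});

  return connection;
}
```

Starting the server proactively at boot means a later "expand function" lookup does not pay the server's cold-start and indexing cost on first use.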
12 changes: 10 additions & 2 deletions api/src/lib/ai/anthropic.ts
@@ -19,12 +19,14 @@ type GenerateRequestOptions = {
handler: string;
baseUrl?: string;
history?: Array<string>;
+  handlerContext?: string;
openApiSpec?: string;
middleware?: {
handler: string;
method: string;
path: string;
}[];
+  middlewareContext?: string;
};

/**
@@ -42,9 +44,11 @@ export async function generateRequestWithAnthropic({
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
}: GenerateRequestOptions) {
logger.debug(
"Generating request data with Anthropic",
@@ -54,18 +58,22 @@
`method: ${method}`,
`path: ${path}`,
`handler: ${handler}`,
-  `openApiSpec: ${openApiSpec}`,
-  `middleware: ${middleware}`,
+  // `handlerContext: ${handlerContext}`,
+  // `openApiSpec: ${openApiSpec}`,
+  // `middleware: ${middleware}`,
+  // `middlewareContext: ${middlewareContext}`,
);
const anthropicClient = new Anthropic({ apiKey, baseURL: baseUrl });
const userPrompt = await invokeRequestGenerationPrompt({
persona,
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
});

const toolChoice: Anthropic.Messages.MessageCreateParams.ToolChoiceTool = {
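The visible hunk ends at the `toolChoice` declaration, which pins Claude to a single tool so the reply comes back as structured tool input rather than free text. A minimal sketch of that pattern with the Anthropic SDK (the tool name and schema here are invented for illustration):

```typescript
import Anthropic from "@anthropic-ai/sdk";

const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

const response = await anthropic.messages.create({
  model: "claude-3-5-sonnet-20240620",
  max_tokens: 2048,
  // Force the model to answer via this tool; its input is the structured result.
  tool_choice: { type: "tool", name: "make_request" },
  tools: [
    {
      name: "make_request", // hypothetical tool name
      description: "Generate an HTTP request to exercise the handler",
      input_schema: {
        type: "object",
        properties: {
          path: { type: "string" },
          body: { type: "string" },
        },
        required: ["path"],
      },
    },
  ],
  messages: [{ role: "user", content: "the prompt built above" }],
});
// With a forced tool_choice, response.content[0] is a tool_use block
// whose .input conforms to the schema.
```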
8 changes: 8 additions & 0 deletions api/src/lib/ai/index.ts
@@ -8,22 +8,26 @@ export async function generateRequestWithAiProvider({
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
}: {
inferenceConfig: Settings;
persona: string;
method: string;
path: string;
handler: string;
+  handlerContext?: string;
history?: string[];
openApiSpec?: string;
middleware?: {
handler: string;
method: string;
path: string;
}[];
+  middlewareContext?: string;
}) {
const {
openaiApiKey,
@@ -43,9 +47,11 @@
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
}).then(
(parsedArgs) => {
return { data: parsedArgs, error: null };
@@ -67,9 +73,11 @@
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
}).then(
(parsedArgs) => {
return { data: parsedArgs, error: null };
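Both provider branches normalize into the same `{ data, error }` envelope, so the inference route can branch on the result instead of wrapping each provider call in try/catch. Roughly, with the concrete payload type assumed rather than taken from this diff:

```typescript
// Assumed shape of the normalized result, inferred from the .then handlers above.
type GenerateRequestResult =
  | { data: unknown; error: null }
  | { data: null; error: { message: string } };
```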
14 changes: 11 additions & 3 deletions api/src/lib/ai/openai.ts
@@ -11,13 +11,15 @@ type GenerateRequestOptions = {
method: string;
path: string;
handler: string;
+  handlerContext?: string;
history?: Array<string>;
openApiSpec?: string;
middleware?: {
handler: string;
method: string;
path: string;
}[];
+  middlewareContext?: string;
};

/**
@@ -35,9 +37,11 @@ export async function generateRequestWithOpenAI({
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
}: GenerateRequestOptions) {
logger.debug(
"Generating request data with OpenAI",
@@ -46,19 +50,23 @@
`persona: ${persona}`,
`method: ${method}`,
`path: ${path}`,
-  `handler: ${handler}`,
-  `openApiSpec: ${openApiSpec}`,
-  `middleware: ${middleware}`,
+  // `handler: ${handler}`,
+  // `handlerContext: ${handlerContext}`,
+  // `openApiSpec: ${openApiSpec}`,
+  // `middleware: ${middleware}`,
+  // `middlewareContext: ${middlewareContext}`,
);
const openaiClient = new OpenAI({ apiKey, baseURL: baseUrl });
const userPrompt = await invokeRequestGenerationPrompt({
persona,
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
});

const response = await openaiClient.chat.completions.create({
18 changes: 18 additions & 0 deletions api/src/lib/ai/prompts.ts
@@ -32,31 +32,37 @@ export const invokeRequestGenerationPrompt = async ({
method,
path,
handler,
+  handlerContext,
history,
openApiSpec,
middleware,
+  middlewareContext,
}: {
persona: string;
method: string;
path: string;
handler: string;
+  handlerContext?: string;
history?: Array<string>;
openApiSpec?: string;
middleware?: {
handler: string;
method: string;
path: string;
}[];
+  middlewareContext?: string;
}) => {
const promptTemplate =
persona === "QA" ? qaTesterPrompt : friendlyTesterPrompt;
const userPromptInterface = await promptTemplate.invoke({
method,
path,
handler,
+  handlerContext: handlerContext ?? "NO HANDLER CONTEXT",
history: history?.join("\n") ?? "NO HISTORY",
openApiSpec: openApiSpec ?? "NO OPENAPI SPEC",
middleware: formatMiddleware(middleware),
+  middlewareContext: middlewareContext ?? "NO MIDDLEWARE CONTEXT",
});
const userPrompt = userPromptInterface.value;
return userPrompt;
@@ -87,9 +93,15 @@ Here is the OpenAPI spec for the handler:
Here is the middleware that will be applied to the request:
{middleware}
+
+Here is some additional context for the middleware that will be applied to the request:
+{middlewareContext}
Here is the code for the handler:
{handler}
+
+Here is some additional context for the handler source code, if you need it:
+{handlerContext}
`.trim(),
);

@@ -113,9 +125,15 @@
Here is the middleware that will be applied to the request:
{middleware}
+
+Here is some additional context for the middleware that will be applied to the request:
+{middlewareContext}
Here is the code for the handler:
{handler}
+
+Here is some additional context for the handler source code, if you need it:
+{handlerContext}
REMEMBER YOU ARE A QA. MISUSE THE API. BUT DO NOT MISUSE YOURSELF.
Keep your responses short-ish. Including your random data.
`.trim(),
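The `?? "NO HANDLER CONTEXT"` style fallbacks keep the template render from failing (or leaking `undefined`) when the expand-function pass finds nothing to attach. The `invoke(...)` / `.value` calls match LangChain's `PromptTemplate` API; a trimmed-down sketch of how the new placeholders get filled, with the template and values invented for illustration:

```typescript
import { PromptTemplate } from "@langchain/core/prompts";

// Stand-in for friendlyTesterPrompt / qaTesterPrompt, reduced to two placeholders.
const promptTemplate = PromptTemplate.fromTemplate(
  `
Here is the code for the handler:
{handler}

Here is some additional context for the handler source code, if you need it:
{handlerContext}
`.trim(),
);

const handlerContext: string | undefined = undefined;
const userPromptInterface = await promptTemplate.invoke({
  handler: "export const route = (c) => c.text('ok');",
  handlerContext: handlerContext ?? "NO HANDLER CONTEXT", // fallback, as in the diff
});
console.log(userPromptInterface.value); // rendered prompt with fallbacks substituted
```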