
Commit

fix: hang bug due to never ending polling loop (#11)
nirga authored Nov 8, 2023
1 parent b8f6c5c commit efcda85
Showing 7 changed files with 20 additions and 14 deletions.
11 changes: 6 additions & 5 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion packages/instrumentation-openai/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-openai",
-  "version": "0.0.18",
+  "version": "0.0.19",
   "description": "OpenTelemetry ai-specific semantic conventions",
   "main": "dist/src/index.js",
   "types": "dist/src/index.d.ts",
2 changes: 1 addition & 1 deletion packages/instrumentation-openai/src/instrumentation.ts
@@ -187,7 +187,7 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
         attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
           message.role;
         attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-          message.content || "";
+          (message.content as string) || "";
       });
     } else {
       if (typeof params.prompt === "string") {
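The added cast only changes the compile-time type: span attribute values must be plain strings, while the message `content` field in newer OpenAI SDK typings is a union that can include `null` (and non-string shapes). A minimal sketch of the pattern, assuming a simplified `Message` type and a literal attribute key in place of the real `SpanAttributes.LLM_PROMPTS` constant (both are illustrative, not from this repo):

type Message = { role: string; content: string | null };

const attributes: Record<string, string> = {};
const message: Message = { role: "assistant", content: null };

// Assert the string case for the type checker and fall back to "" at runtime
// when content is null, so the attribute value is always a string.
attributes["llm.prompts.0.content"] = (message.content as string) || "";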
4 changes: 2 additions & 2 deletions packages/sample-app/src/sample_with.ts
@@ -9,7 +9,7 @@ traceloop.initialize({
 const openai = new OpenAI();
 
 async function chat() {
-  return traceloop.withWorkflow("sample_chat", async () => {
+  return await traceloop.withWorkflow("sample_chat", async () => {
     const chatCompletion = await openai.chat.completions.create({
       messages: [
         { role: "user", content: "Tell me a joke about OpenTelemetry" },
@@ -22,7 +22,7 @@ async function chat() {
 }
 
 async function completion() {
-  return traceloop.withWorkflow("sample_completion", async () => {
+  return await traceloop.withWorkflow("sample_completion", async () => {
     const completion = await openai.completions.create({
       prompt: "Tell me a joke about TypeScript",
       model: "gpt-3.5-turbo-instruct",
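One practical difference between `return p` and `return await p` in an async function: with `return await`, a rejection is thrown inside the wrapping function, so its own try/catch (and any async context it holds open) sees the failure before the caller does; with a bare `return`, the pending promise is handed straight to the caller. A minimal sketch with a hypothetical `doWork` helper (not from this repo):

async function doWork(): Promise<string> {
  throw new Error("boom");
}

async function withoutAwait(): Promise<string> {
  try {
    return doWork(); // rejection escapes to the caller; this catch never fires
  } catch (err) {
    console.error("not reached", err);
    throw err;
  }
}

async function withAwait(): Promise<string> {
  try {
    return await doWork(); // rejection is caught here, inside the wrapper
  } catch (err) {
    console.error("handled inside the wrapper", err);
    throw err;
  }
}

withoutAwait().catch(() => console.log("caller handles the error"));
withAwait().catch(() => console.log("caller still sees the rethrown error"));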
4 changes: 2 additions & 2 deletions packages/traceloop-sdk/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/node-server-sdk",
-  "version": "0.0.18",
+  "version": "0.0.19",
   "description": "Traceloop Software Development Kit (SDK) for Node.js",
   "main": "dist/src/index.js",
   "types": "dist/src/index.d.ts",
@@ -34,7 +34,7 @@
   "dependencies": {
     "@opentelemetry/exporter-trace-otlp-proto": "^0.44.0",
     "@opentelemetry/sdk-node": "^0.44.0",
-    "@traceloop/instrumentation-openai": "^0.0.18",
+    "@traceloop/instrumentation-openai": "^0.0.19",
     "@types/nunjucks": "^3.2.5",
     "fetch-retry": "^5.0.6",
     "nunjucks": "^3.2.4",
2 changes: 1 addition & 1 deletion packages/traceloop-sdk/src/lib/configuration/index.ts
@@ -28,7 +28,7 @@ export const initialize = (options: InitializeOptions) => {
     options.appName = process.env.npm_package_name;
   }
 
-  if (!options.traceloopSyncEnabled) {
+  if (options.traceloopSyncEnabled === undefined) {
     if (process.env.TRACELOOP_SYNC_ENABLED !== undefined) {
       options.traceloopSyncEnabled = ["1", "true"].includes(
         process.env.TRACELOOP_SYNC_ENABLED.toLowerCase(),
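The reason for this change: `!options.traceloopSyncEnabled` is true both when the option is unset and when the caller explicitly passes `false`, so an explicit opt-out could be overwritten by the environment-variable fallback; `=== undefined` applies the fallback only when the caller said nothing. A minimal sketch with a simplified, hypothetical option shape:

type Options = { syncEnabled?: boolean };

function resolveWithFalsyCheck(options: Options): boolean {
  if (!options.syncEnabled) {
    // Runs for BOTH undefined and an explicit false, so a caller's
    // opt-out gets overridden by the default.
    options.syncEnabled = true;
  }
  return options.syncEnabled;
}

function resolveWithUndefinedCheck(options: Options): boolean {
  if (options.syncEnabled === undefined) {
    // Runs only when the caller did not set the option at all.
    options.syncEnabled = true;
  }
  return options.syncEnabled;
}

console.log(resolveWithFalsyCheck({ syncEnabled: false }));     // true  (surprising)
console.log(resolveWithUndefinedCheck({ syncEnabled: false })); // false (respects the caller)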
9 changes: 7 additions & 2 deletions packages/traceloop-sdk/src/lib/prompts/registry.ts
@@ -34,6 +34,7 @@ const populateRegistry = (prompts: any) => {
 export const initializeRegistry = (options: InitializeOptions) => {
   const {
     baseUrl,
+    suppressLogs,
     traceloopSyncEnabled,
     traceloopSyncPollingInterval,
     traceloopSyncDevPollingInterval,
@@ -55,8 +56,12 @@ export const initializeRegistry = (options: InitializeOptions) => {
       try {
         const { prompts } = await fetchPrompts(options);
         populateRegistry(prompts);
-      } catch (err) {}
-    }, pollingInterval! * 1000);
+      } catch (err) {
+        if (!suppressLogs) {
+          console.error("Failed to fetch prompt data", err);
+        }
+      }
+    }, pollingInterval! * 1000).unref();
 
     return true;
   })
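This `.unref()` call is the fix the commit title describes: `setInterval` returns a Node.js `Timeout` handle that keeps the event loop alive, so any process that enabled prompt sync would never exit while the poller was scheduled. Unreferencing the timer lets the process exit once its own work is done, while polling continues for as long as the process stays alive. A minimal standalone sketch (the polling body is illustrative):

// Poll every 5 seconds; the body stands in for refreshing a prompt registry.
const poller = setInterval(() => {
  console.log("polling...");
}, 5_000);

// Without this line the interval keeps the process alive indefinitely (the
// hang this commit fixes). With it, Node may exit when nothing else is pending.
poller.unref();

console.log("main work finished; the process can now exit");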

0 comments on commit efcda85
