Skip to content

Commit

Permalink
Update LLM error handling for data extraction
Browse files Browse the repository at this point in the history
Signed-off-by: Sean Sundberg <[email protected]>
  • Loading branch information
seansund committed Oct 26, 2023
1 parent 12f0cf7 commit 703f14a
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 6 deletions.
25 changes: 20 additions & 5 deletions src/services/data-extraction/data-extraction.impl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -219,11 +219,26 @@ export class DataExtractionImpl extends DataExtractionCsv<WatsonBackends, Contex
const input = prompt + '\n\n' + text;

const modelId = config.model || this.backendConfig.modelId;
const result: GenerativeResponse = await backends.wml.generate({
input,
modelId,
parameters,
});
const result: GenerativeResponse = await backends.wml
.generate({
input,
modelId,
parameters,
})
.then(result => {
if (result?.generatedText?.trim()) {
return result
}

const fallbackModelId: string = 'meta-llama/llama-2-70b-chat'
console.log(`*** No information returned from generate. Trying again with ${fallbackModelId} model`)

return backends.wml.generate({
input,
modelId: fallbackModelId,
parameters,
})
});

console.log('2. Text generated from watsonx.ai:', {prompt, modelId, max_new_tokens, generatedText: result.generatedText, input})

Expand Down
2 changes: 1 addition & 1 deletion src/utils/gen-ai-model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ export class GenAiModel {
}

console.log('Error generating text: ', err);
throw err;
return {generatedText: '[Error]'}
})
}
}

0 comments on commit 703f14a

Please sign in to comment.