import * as traceloop from "@traceloop/node-server-sdk";
import OpenAI from "openai";
// Initialize the Traceloop SDK before any OpenAI calls are made so the
// OpenAI instrumentation is installed first. disableBatch sends each
// span immediately (useful for short-lived sample scripts).
traceloop.initialize({
appName: "sample_openai",
apiKey: process.env.TRACELOOP_API_KEY,
disableBatch: true,
});
// Reads the API key from OPENAI_API_KEY by default.
const openai = new OpenAI();
class SampleOpenAI {
  /**
   * Runs a streaming completion request and aggregates the streamed
   * chunks into the full completion text.
   *
   * With `stream: true` the OpenAI SDK resolves to an async iterable of
   * chunks rather than a finished completion object, so the stream must
   * be consumed here. The original code returned the unconsumed stream,
   * leaving the caller with an opaque stream object to log.
   *
   * @returns {Promise<string>} the concatenated completion text
   */
  @traceloop.workflow("sample_completion")
  async completion() {
    const stream = await openai.completions.create({
      prompt: "Tell me a joke about TypeScript",
      model: "gpt-3.5-turbo-instruct",
      stream: true,
    });

    // Each chunk of a (non-chat) completion stream carries a partial
    // piece of text in choices[0].text; accumulate the pieces to
    // reconstruct the full response.
    let text = "";
    for await (const chunk of stream) {
      text += chunk.choices[0]?.text ?? "";
    }
    return text;
  }
}
traceloop.withAssociationProperties(
{ user_id: "12345", chat_id: "789" },
async () => {
const sampleOpenAI = new SampleOpenAI();
const completion = await sampleOpenAI.completion();
console.log(completion);
await traceloop.reportScore({ chat_id: "789" }, 1);
},
);
Traceloop exporting traces to https://api.traceloop.com
/Users/k/personal/openllmetry-js/packages/instrumentation-openai/dist/src/instrumentation.js:211
result.choices.forEach((choice, index) => {
^
TypeError: Cannot read properties of undefined (reading 'forEach')
at OpenAIInstrumentation._endSpan (/Users/k/personal/openllmetry-js/packages/instrumentation-openai/dist/src/instrumentation.js:211:32)
at /Users/k/personal/openllmetry-js/packages/instrumentation-openai/dist/src/instrumentation.js:171:30
at new Promise (<anonymous>)
at /Users/k/personal/openllmetry-js/packages/instrumentation-openai/dist/src/instrumentation.js:149:20
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async Object.completion (/Users/k/personal/openllmetry-js/packages/sample-app/dist/src/sample_streaming.js:19:28)
at async /Users/k/personal/openllmetry-js/packages/traceloop-sdk/dist/src/lib/tracing/decorators.js:22:25
at async descriptor.value (/Users/k/personal/openllmetry-js/packages/traceloop-sdk/dist/src/lib/tracing/decorators.js:60:24)
at async /Users/k/personal/openllmetry-js/packages/sample-app/dist/src/sample_streaming.js:32:24
Node.js v18.19.0
Expected behavior: the program should not crash, and the streaming completion should be recorded and exported as correct spans to Traceloop.