
Commit 8d64904

Author: Andrei Bratu (committed)
Added back prompt test
1 parent 001376e commit 8d64904

File tree

1 file changed (+18, -18 lines)


tests/decorators/prompt.utility.test.ts

Lines changed: 18 additions & 18 deletions

@@ -55,29 +55,29 @@ describe("prompt decorator", () => {
     expect(spans[1].attributes["prompt"]).toBeFalsy();
   });

-  // it.each(PROVIDER_AND_MODEL)(
-  //   "should enrich prompt span when using HLProcessor with provider %s and model %s",
-  //   async (provider, model) => {
-  //     const [tracer, exporter] = openTelemetryHLProcessorTestConfiguration();
-  //     const callLLM = testScenario(tracer);
+  it.each(PROVIDER_AND_MODEL)(
+    "should enrich prompt span when using HLProcessor with provider %s and model %s",
+    async (provider, model) => {
+      const [tracer, exporter] = openTelemetryHLProcessorTestConfiguration();
+      const callLLM = testScenario(tracer);

-  //     await callLLM(provider, model, callLLMMessages());
+      await callLLM(provider, model, callLLMMessages());

-  //     const spans = exporter.getFinishedSpans();
-  //     expect(spans.length).toBe(2);
+      const spans = exporter.getFinishedSpans();
+      expect(spans.length).toBe(2);

-  //     expect(isHumanloopSpan(spans[0])).toBeFalsy();
-  //     expect(isHumanloopSpan(spans[1])).toBeTruthy();
+      expect(isHumanloopSpan(spans[0])).toBeFalsy();
+      expect(isHumanloopSpan(spans[1])).toBeTruthy();

-  //     const promptKernel = readFromOpenTelemetrySpan(spans[1], HUMANLOOP_FILE_KEY)
-  //       .prompt as unknown as PromptKernelRequest;
+      const promptKernel = readFromOpenTelemetrySpan(spans[1], HUMANLOOP_FILE_KEY)
+        .prompt as unknown as PromptKernelRequest;

-  //     expect(promptKernel.temperature).toBe(0.8);
-  //     expect(promptKernel.model).toBe(model);
-  //     expect(promptKernel.provider).toBe(provider);
-  //     expect(promptKernel.topP).toBeFalsy();
-  //   }
-  // );
+      expect(promptKernel.temperature).toBe(0.8);
+      expect(promptKernel.model).toBe(model);
+      expect(promptKernel.provider).toBe(provider);
+      expect(promptKernel.topP).toBeFalsy();
+    }
+  );

   it.each(PROVIDER_AND_MODEL)(
     "should prefer overrides over inferred values for provider %s and model %s",
