Skip to content

Commit

Permalink
feat: v1 of otel semantic conventions (#232)
Browse files (browse the repository at this point in the history)
  • Loading branch information
nirga authored Apr 29, 2024
1 parent 1586ae3 commit 8f44173
Show file tree
Hide file tree
Showing 25 changed files with 375 additions and 348 deletions.
2 changes: 1 addition & 1 deletion packages/ai-semantic-conventions/README
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ const span = tracer
.startSpan()
.startSpan(spanName, spanOptions)
.setAttributes({
[SemanticAttributes.LLM_VENDOR]: "openai",
[SemanticAttributes.LLM_SYSTEM]: "openai",
});
```

Expand Down
23 changes: 13 additions & 10 deletions packages/ai-semantic-conventions/src/SemanticAttributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,23 +15,26 @@
*/

export const SpanAttributes = {
LLM_VENDOR: "llm.vendor",
LLM_SYSTEM: "gen_ai.system",
LLM_REQUEST_MODEL: "gen_ai.request.model",
LLM_REQUEST_MAX_TOKENS: "gen_ai.request.max_tokens",
LLM_REQUEST_TEMPERATURE: "gen_ai.request.temperature",
LLM_REQUEST_TOP_P: "gen_ai.request.top_p",
LLM_PROMPTS: "gen_ai.prompt",
LLM_COMPLETIONS: "gen_ai.completion",
LLM_RESPONSE_MODEL: "gen_ai.response.model",
LLM_USAGE_PROMPT_TOKENS: "gen_ai.usage.prompt_tokens",
LLM_USAGE_COMPLETION_TOKENS: "gen_ai.usage.completion_tokens",

// LLM
LLM_REQUEST_TYPE: "llm.request.type",
LLM_REQUEST_MODEL: "llm.request.model",
LLM_RESPONSE_MODEL: "llm.response.model",
LLM_REQUEST_MAX_TOKENS: "llm.request.max_tokens",
LLM_USAGE_TOTAL_TOKENS: "llm.usage.total_tokens",
LLM_USAGE_COMPLETION_TOKENS: "llm.usage.completion_tokens",
LLM_USAGE_PROMPT_TOKENS: "llm.usage.prompt_tokens",
LLM_TEMPERATURE: "llm.temperature",
LLM_TOP_P: "llm.top_p",
LLM_TOP_K: "llm.top_k",
LLM_FREQUENCY_PENALTY: "llm.frequency_penalty",
LLM_PRESENCE_PENALTY: "llm.presence_penalty",
LLM_PROMPTS: "llm.prompts",
LLM_COMPLETIONS: "llm.completions",
LLM_CHAT_STOP_SEQUENCES: "llm.chat.stop_sequences",
LLM_REQUEST_FUNCTIONS: "llm.request.functions",

// Vector DB
VECTOR_DB_VENDOR: "db.system",
VECTOR_DB_QUERY_TOP_K: "db.vector.query.top_k",
Expand Down
6 changes: 3 additions & 3 deletions packages/instrumentation-anthropic/src/instrumentation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -189,14 +189,14 @@ export class AnthropicInstrumentation extends InstrumentationBase<any> {
};
}): Span {
const attributes: Attributes = {
[SpanAttributes.LLM_VENDOR]: "Anthropic",
[SpanAttributes.LLM_SYSTEM]: "Anthropic",
[SpanAttributes.LLM_REQUEST_TYPE]: type,
};

try {
attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
attributes[SpanAttributes.LLM_TOP_P] = params.top_p;
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = params.temperature;
attributes[SpanAttributes.LLM_REQUEST_TOP_P] = params.top_p;
attributes[SpanAttributes.LLM_TOP_K] = params.top_k;

if (type === "completion") {
Expand Down
2 changes: 1 addition & 1 deletion packages/instrumentation-azure/src/instrumentation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ export class AzureOpenAIInstrumentation extends InstrumentationBase<any> {
};
}): Span {
const attributes: Attributes = {
[SpanAttributes.LLM_VENDOR]: "Azure OpenAI",
[SpanAttributes.LLM_SYSTEM]: "Azure OpenAI",
[SpanAttributes.LLM_REQUEST_TYPE]: type,
};

Expand Down
28 changes: 14 additions & 14 deletions packages/instrumentation-bedrock/src/instrumentation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
: ["", ""];

attributes = {
[SpanAttributes.LLM_VENDOR]: vendor,
[SpanAttributes.LLM_SYSTEM]: vendor,
[SpanAttributes.LLM_REQUEST_MODEL]: model,
[SpanAttributes.LLM_RESPONSE_MODEL]: model,
[SpanAttributes.LLM_REQUEST_TYPE]: LLMRequestTypeValues.COMPLETION,
Expand Down Expand Up @@ -197,7 +197,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
? (span["attributes"] as Record<string, any>)
: {};

if (SpanAttributes.LLM_VENDOR in attributes) {
if (SpanAttributes.LLM_SYSTEM in attributes) {
if (!(result.body instanceof Object.getPrototypeOf(Uint8Array))) {
const rawRes = result.body as AsyncIterable<bedrock.ResponseStream>;

Expand Down Expand Up @@ -234,7 +234,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
}

let responseAttributes = this._setResponseAttributes(
attributes[SpanAttributes.LLM_VENDOR],
attributes[SpanAttributes.LLM_SYSTEM],
parsedResponse,
true,
);
Expand Down Expand Up @@ -265,7 +265,7 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
const parsedResponse = JSON.parse(jsonString);

const responseAttributes = this._setResponseAttributes(
attributes[SpanAttributes.LLM_VENDOR],
attributes[SpanAttributes.LLM_SYSTEM],
parsedResponse,
);

Expand All @@ -289,8 +289,8 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
switch (vendor) {
case "ai21": {
return {
[SpanAttributes.LLM_TOP_P]: requestBody["topP"],
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["topP"],
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["maxTokens"],
[SpanAttributes.LLM_PRESENCE_PENALTY]:
requestBody["presencePenalty"]["scale"],
Expand All @@ -309,9 +309,9 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
}
case "amazon": {
return {
[SpanAttributes.LLM_TOP_P]:
[SpanAttributes.LLM_REQUEST_TOP_P]:
requestBody["textGenerationConfig"]["topP"],
[SpanAttributes.LLM_TEMPERATURE]:
[SpanAttributes.LLM_REQUEST_TEMPERATURE]:
requestBody["textGenerationConfig"]["temperature"],
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]:
requestBody["textGenerationConfig"]["maxTokenCount"],
Expand All @@ -328,9 +328,9 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
}
case "anthropic": {
return {
[SpanAttributes.LLM_TOP_P]: requestBody["top_p"],
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["top_p"],
[SpanAttributes.LLM_TOP_K]: requestBody["top_k"],
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]:
requestBody["max_tokens_to_sample"],

Expand All @@ -350,9 +350,9 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
}
case "cohere": {
return {
[SpanAttributes.LLM_TOP_P]: requestBody["p"],
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["p"],
[SpanAttributes.LLM_TOP_K]: requestBody["k"],
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_tokens"],

// Prompt & Role
Expand All @@ -367,8 +367,8 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
}
case "meta": {
return {
[SpanAttributes.LLM_TOP_P]: requestBody["top_p"],
[SpanAttributes.LLM_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["top_p"],
[SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
[SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_gen_len"],

// Prompt & Role
Expand Down
9 changes: 6 additions & 3 deletions packages/instrumentation-bedrock/tests/ai21.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -117,13 +117,16 @@ describe("Test Ai21 with AWS Bedrock Instrumentation", () => {
const spans = memoryExporter.getFinishedSpans();

const attributes = spans[0].attributes;
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.topP);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.topP,
);
assert.strictEqual(
attributes[SpanAttributes.LLM_PRESENCE_PENALTY],
params.presencePenalty.scale,
Expand All @@ -133,7 +136,7 @@ describe("Test Ai21 with AWS Bedrock Instrumentation", () => {
params.frequencyPenalty.scale,
);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down
12 changes: 6 additions & 6 deletions packages/instrumentation-bedrock/tests/amazon.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -118,18 +118,18 @@ describe("Test Amazon Titan with AWS Bedrock Instrumentation", () => {
const spans = memoryExporter.getFinishedSpans();

const attributes = spans[0].attributes;
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(
attributes[SpanAttributes.LLM_TOP_P],
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.textGenerationConfig.topP,
);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.textGenerationConfig.temperature,
);
assert.strictEqual(
Expand Down Expand Up @@ -203,18 +203,18 @@ describe("Test Amazon Titan with AWS Bedrock Instrumentation", () => {

const attributes = spans[0].attributes;

assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(
attributes[SpanAttributes.LLM_TOP_P],
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.textGenerationConfig.topP,
);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.textGenerationConfig.temperature,
);
assert.strictEqual(
Expand Down
18 changes: 12 additions & 6 deletions packages/instrumentation-bedrock/tests/anthropic.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -116,16 +116,19 @@ describe("Test Anthropic with AWS Bedrock Instrumentation", () => {
const spans = memoryExporter.getFinishedSpans();

const attributes = spans[0].attributes;
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.top_p,
);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.top_k);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down Expand Up @@ -187,16 +190,19 @@ describe("Test Anthropic with AWS Bedrock Instrumentation", () => {

const attributes = spans[0].attributes;

assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.top_p,
);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.top_k);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down
15 changes: 9 additions & 6 deletions packages/instrumentation-bedrock/tests/cohere.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -116,16 +116,16 @@ describe("Test Cohere with AWS Bedrock Instrumentation", () => {
const spans = memoryExporter.getFinishedSpans();

const attributes = spans[0].attributes;
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.p);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_TOP_P], params.p);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.k);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down Expand Up @@ -184,16 +184,19 @@ describe("Test Cohere with AWS Bedrock Instrumentation", () => {

const attributes = spans[0].attributes;

assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.p);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.p,
);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_K], params.k);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down
18 changes: 12 additions & 6 deletions packages/instrumentation-bedrock/tests/meta.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -115,15 +115,18 @@ describe("Test Meta with AWS Bedrock Instrumentation", () => {
const spans = memoryExporter.getFinishedSpans();

const attributes = spans[0].attributes;
assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.top_p,
);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down Expand Up @@ -194,15 +197,18 @@ describe("Test Meta with AWS Bedrock Instrumentation", () => {

const attributes = spans[0].attributes;

assert.strictEqual(attributes[SpanAttributes.LLM_VENDOR], vendor);
assert.strictEqual(attributes[SpanAttributes.LLM_SYSTEM], vendor);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TYPE],
"completion",
);
assert.strictEqual(attributes[SpanAttributes.LLM_REQUEST_MODEL], model);
assert.strictEqual(attributes[SpanAttributes.LLM_TOP_P], params.top_p);
assert.strictEqual(
attributes[SpanAttributes.LLM_TEMPERATURE],
attributes[SpanAttributes.LLM_REQUEST_TOP_P],
params.top_p,
);
assert.strictEqual(
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE],
params.temperature,
);
assert.strictEqual(
Expand Down
6 changes: 3 additions & 3 deletions packages/instrumentation-cohere/src/instrumentation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
type: LLM_COMPLETION_TYPE;
}): Span {
const attributes: Attributes = {
[SpanAttributes.LLM_VENDOR]: "Cohere",
[SpanAttributes.LLM_SYSTEM]: "Cohere",
[SpanAttributes.LLM_REQUEST_TYPE]: this._getLlmRequestTypeByMethod(type),
};

Expand All @@ -226,9 +226,9 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;

if (!("query" in params)) {
attributes[SpanAttributes.LLM_TOP_P] = params.p;
attributes[SpanAttributes.LLM_REQUEST_TOP_P] = params.p;
attributes[SpanAttributes.LLM_TOP_K] = params.k;
attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = params.temperature;
attributes[SpanAttributes.LLM_FREQUENCY_PENALTY] =
params.frequencyPenalty;
attributes[SpanAttributes.LLM_PRESENCE_PENALTY] =
Expand Down
Loading

0 comments on commit 8f44173

Please sign in to comment.