"Using the OpenTelemetry Python SDK"

How to send data to Opik using the OpenTelemetry Python SDK

This guide shows you how to directly instrument your Python applications with the OpenTelemetry SDK to send trace data to Opik.

Installation

First, install the required OpenTelemetry packages:

pip install opentelemetry-api opentelemetry-sdk opentelemetry-exporter-otlp

Full Example

Here’s a complete example that demonstrates how to instrument a chatbot application with OpenTelemetry and send the traces to Opik:

# Dependencies: opentelemetry-exporter-otlp

import os
import time

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.semconv.resource import ResourceAttributes

# --- OpenTelemetry configuration (comet.com) --------------------------------
# Point the OTLP exporter at Opik's private OTel endpoint and carry the
# API key / workspace / project routing metadata in the standard OTLP
# headers environment variable. The exporter created below reads both.
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://www.comet.com/opik/api/v1/private/otel"
os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = "Authorization=<your-api-key>,Comet-Workspace=<your-workspace-name>,projectName=<your-project-name>"

# Resource identifying this service in every exported trace.
resource = Resource.create({ResourceAttributes.SERVICE_NAME: "opentelemetry-example"})

# Wire provider -> batch processor -> OTLP/HTTP exporter, then install it
# as the process-wide tracer provider.
tracer_provider = TracerProvider(resource=resource)
tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
trace.set_tracer_provider(tracer_provider)

# Named tracer used to start spans in main().
tracer = trace.get_tracer("example-tracer")
38
def main():
    """Simulate one chatbot turn, recorded as a parent conversation span
    containing a child LLM span annotated with GenAI semantic conventions."""
    question = "What's the weather like today?"

    # Parent span: the whole conversation turn. Opik reads the "input" and
    # "output" attributes on the root span of the trace.
    with tracer.start_as_current_span("chatbot_conversation") as conversation_span:
        print(f"User request: {question}")

        conversation_span.set_attribute("input", question)
        conversation_span.set_attribute("conversation.id", "conv_12345")
        conversation_span.set_attribute("conversation.type", "weather_inquiry")

        # Simulate initial request processing.
        time.sleep(0.2)

        # Child span: the (simulated) LLM call.
        with tracer.start_as_current_span("llm_completion") as llm_span:
            print("Generating LLM response...")

            prompt = f"User question: {question}\n\nProvide a concise answer about the weather."

            # gen_ai.* attributes follow the OpenTelemetry GenAI semantic
            # conventions; token counts here are illustrative example values.
            gen_ai_attributes = {
                "gen_ai.operation.name": "completion",
                "gen_ai.system": "gpt",
                "gen_ai.request.model": "gpt-4",
                "gen_ai.response.model": "gpt-4",
                "gen_ai.request.input": prompt,
                "gen_ai.usage.input_tokens": 10,
                "gen_ai.usage.output_tokens": 25,
                "gen_ai.usage.total_tokens": 35,
                "gen_ai.request.temperature": 0.7,
                "gen_ai.request.max_tokens": 100,
            }
            for attr_key, attr_value in gen_ai_attributes.items():
                llm_span.set_attribute(attr_key, attr_value)

            # Simulate LLM thinking time.
            time.sleep(0.5)

            answer = "It's sunny with a high of 75°F in your area today!"
            llm_span.set_attribute("gen_ai.response.output", answer)
            print("LLM generation completed")

        # Back in the parent span context: record the final answer.
        conversation_span.set_attribute("output", answer)
        print(f"Chatbot response: {answer}")
92
93if __name__ == "__main__":
94 main()
95
96 # Ensure all spans are flushed before the program exits
97 tracer_provider.shutdown()
98
99 print("\nSpans have been sent to OpenTelemetry collector.")
100 print("If you configured Comet.com, you can view the traces in your Comet project.")