|
1 | | -# snowglobe-telemetry-openinference |
2 | | -Telemetry instrumentation for the snowglobe client using OpenInference semantics. |
| 1 | +# Snowglobe Telemetry Instrumentation for OpenInference
| 2 | + |
| 3 | +Instrument your Snowglobe connected app with OpenInference and start sending traces to popular OpenInference-compatible sinks like Arize or Arize Phoenix.
| 4 | + |
| 5 | +## Installation |
| 6 | + |
| 7 | +``` |
| 8 | +pip install snowglobe-telemetry-openinference |
| 9 | +``` |
| 10 | + |
| 11 | +If using uv, set the `--prerelease=allow` flag:
| 12 | +``` |
| 13 | +uv pip install --prerelease=allow snowglobe-telemetry-openinference |
| 14 | +``` |
| 15 | + |
| 16 | + |
| 17 | +## Add the OpenInferenceInstrumentor to your agent file |
| 18 | + |
| 19 | +Reminder: Each agent wrapper file resides in the root directory of your project, and is named after the agent (e.g. `My Agent Name` becomes `my_agent_name.py`). |
| 20 | + |
| 21 | +```python |
| 22 | +from snowglobe.client import CompletionRequest, CompletionFunctionOutputs |
| 23 | +from openai import OpenAI |
| 24 | +import os |
| 25 | + |
| 26 | +### Add these two lines to your agent file and watch context rich traces come in! |
| 27 | +from snowglobe.telemetry.openinference import OpenInferenceInstrumentor |
| 28 | +OpenInferenceInstrumentor().instrument() |
| 29 | + |
| 30 | + |
| 31 | +client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY")) |
| 32 | + |
| 33 | +def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs: |
| 34 | + """ |
| 35 | + Process a scenario request from Snowglobe. |
| 36 | + |
| 37 | + This function is called by the Snowglobe client to process requests. It should return a |
| 38 | + CompletionFunctionOutputs object with the response content. |
| 39 | +
|
| 40 | + Example CompletionRequest: |
| 41 | + CompletionRequest( |
| 42 | + messages=[ |
| 43 | + SnowglobeMessage(role="user", content="Hello, how are you?", snowglobe_data=None), |
| 44 | + ] |
| 45 | + ) |
| 46 | +
|
| 47 | + Example CompletionFunctionOutputs: |
| 48 | + CompletionFunctionOutputs(response="This is a string response from your application") |
| 49 | + |
| 50 | + Args: |
| 51 | + request (CompletionRequest): The request object containing the messages. |
| 52 | +
|
| 53 | + Returns: |
| 54 | + CompletionFunctionOutputs: The response object with the generated content. |
| 55 | + """ |
| 56 | + |
| 57 | + # Process the request using the messages. Example: |
| 58 | + messages = request.to_openai_messages() |
| 59 | + response = client.chat.completions.create( |
| 60 | + model="gpt-4o-mini", |
| 61 | + messages=messages |
| 62 | + ) |
| 63 | + return CompletionFunctionOutputs(response=response.choices[0].message.content) |
| 64 | +``` |
| 65 | + |
| 66 | + |
| 67 | + |
| 68 | +## Enhancing Snowglobe Connect SDK's Traces with OpenInference Integrations |
| 69 | +You can add more rich context to the traces the Snowglobe Connect SDK captures by installing additional OpenInference instrumentors and registering the appropriate tracer provider in your agent wrapper file. |
| 70 | + |
| 71 | +The examples below show how to add OpenAI instrumentation for either Arize or Arize Phoenix in addition to Snowglobe's OpenInference instrumentation:
| 72 | + |
| 73 | +### Arize |
| 74 | + |
| 75 | +Install the Arize OpenTelemetry package and the OpenAI specific instrumentor.
| 76 | +```sh |
| 77 | +pip install openinference-instrumentation-openai arize-otel |
| 78 | +``` |
| 79 | + |
| 80 | +Then register the tracer provider and use the OpenAI instrumentor in your agent file:
| 81 | +```py |
| 82 | +import os |
| 83 | +from openai import OpenAI |
| 84 | +from snowglobe.client import CompletionRequest, CompletionFunctionOutputs |
| 85 | +from arize.otel import register |
| 86 | + |
| 87 | +# Setup OTel via our convenience function |
| 88 | +tracer_provider = register( |
| 89 | + space_id = "your-space-id", # in app space settings page |
| 90 | + api_key = "your-api-key", # in app space settings page |
| 91 | + project_name = "your-project-name", # name this to whatever you would like |
| 92 | +) |
| 93 | + |
| 94 | +# Import the OpenAI instrumentor from OpenInference |
| 95 | +from openinference.instrumentation.openai import OpenAIInstrumentor |
| 96 | + |
| 97 | +# Instrument OpenAI |
| 98 | +OpenAIInstrumentor().instrument(tracer_provider=tracer_provider) |
| 99 | + |
| 100 | +# Import the OpenInference instrumentor from Snowglobe |
| 101 | +from snowglobe.telemetry.openinference import OpenInferenceInstrumentor |
| 102 | + |
| 103 | +# Instrument the Snowglobe client |
| 104 | +OpenInferenceInstrumentor().instrument(tracer_provider=tracer_provider) |
| 105 | +
| 106 | +client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))
| 107 | +def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs: |
| 108 | + messages = request.to_openai_messages() |
| 109 | + response = client.chat.completions.create( |
| 110 | + model="gpt-4o-mini", |
| 111 | + messages=messages |
| 112 | + ) |
| 113 | + return CompletionFunctionOutputs(response=response.choices[0].message.content) |
| 114 | +``` |
| 115 | + |
| 116 | + |
| 117 | +### Arize Phoenix |
| 118 | + |
| 119 | +Install the Arize Phoenix OpenTelemetry package and the OpenAI specific instrumentor.
| 120 | +```sh |
| 121 | +pip install openinference-instrumentation-openai arize-phoenix-otel |
| 122 | +``` |
| 123 | + |
| 124 | +Then register the tracer provider and use the OpenAI instrumentor in your agent file:
| 125 | +```py |
| 126 | +import os |
| 127 | +from openai import OpenAI |
| 128 | +from snowglobe.client import CompletionRequest, CompletionFunctionOutputs |
| 129 | +from phoenix.otel import register |
| 130 | + |
| 131 | +os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "http://localhost:6006" |
| 132 | + |
| 133 | +# configure the Phoenix tracer |
| 134 | +tracer_provider = register( |
| 135 | + project_name="my-llm-app", # Default is 'default' |
| 136 | +) |
| 137 | + |
| 138 | +# Import the OpenAI instrumentor from OpenInference |
| 139 | +from openinference.instrumentation.openai import OpenAIInstrumentor |
| 140 | + |
| 141 | +# Instrument OpenAI |
| 142 | +OpenAIInstrumentor().instrument(tracer_provider=tracer_provider) |
| 143 | + |
| 144 | +# Import the OpenInference instrumentor from Snowglobe |
| 145 | +from snowglobe.telemetry.openinference import OpenInferenceInstrumentor |
| 146 | + |
| 147 | +# Instrument the Snowglobe client |
| 148 | +OpenInferenceInstrumentor().instrument(tracer_provider=tracer_provider) |
| 149 | +
| 150 | +client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))
| 151 | +def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs: |
| 152 | + messages = request.to_openai_messages() |
| 153 | + response = client.chat.completions.create( |
| 154 | + model="gpt-4o-mini", |
| 155 | + messages=messages |
| 156 | + ) |
| 157 | + return CompletionFunctionOutputs(response=response.choices[0].message.content) |
| 158 | +``` |
0 commit comments