Skip to content

Commit 02a1e6d

Browse files
authored
Merge pull request #1 from guardrails-ai/initial-commit
[GR-987]: Package setup and initial implementation
2 parents d81eb6c + 65bf398 commit 02a1e6d

File tree

12 files changed

+938
-2
lines changed

12 files changed

+938
-2
lines changed

.github/workflows/pr_qc.yml

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
name: Quality Control
2+
3+
on:
4+
workflow_dispatch:
5+
pull_request: # Trigger the workflow on pull request events
6+
7+
jobs:
8+
quality_control:
9+
runs-on: ubuntu-latest
10+
11+
steps:
12+
- name: Checkout repository
13+
uses: actions/checkout@v4
14+
- name: Set up Python
15+
uses: actions/setup-python@v4
16+
with:
17+
python-version: '3.11'
18+
- name: Install dependencies
19+
run: |
20+
python -m pip install --upgrade pip
21+
make install-dev
22+
- name: Run Type Checks
23+
run: |
24+
make type
25+
- name: Run Linter
26+
run: |
27+
make lint

.github/workflows/release.yml

Lines changed: 120 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
1+
name: Publish to PyPI
2+
3+
permissions:
4+
id-token: write
5+
contents: write
6+
7+
on:
8+
workflow_dispatch:
9+
# push:
10+
# branches:
11+
# - main
12+
13+
jobs:
14+
setup:
15+
runs-on: ubuntu-latest
16+
env:
17+
GUARDRAILS_TOKEN: ${{ secrets.PRIV_PYPI_PUBLISH_TOKEN }}
18+
PYPI_REPOSITORY_URL: 'https://pypi.guardrailsai.com'
19+
steps:
20+
- name: Checkout Repository
21+
uses: actions/checkout@v4
22+
with:
23+
fetch-tags: true
24+
fetch-depth: 0
25+
26+
- name: Set up Python
27+
uses: actions/setup-python@v4
28+
with:
29+
python-version: '3.11'
30+
31+
- name: Check for version bump
32+
id: version
33+
continue-on-error: false
34+
shell: bash {0}
35+
run: |
36+
PYPROJECT_TOML="pyproject.toml"
37+
38+
# Extract the version using grep and sed
39+
version=$(grep -m 1 "version" "$PYPROJECT_TOML" | sed -E 's/.*version[[:space:]]*=[[:space:]]*"([^"]*)".*/\1/')
40+
41+
echo "Project version: $version"
42+
43+
if [ -z "$version" ]
44+
then
45+
echo "Version is missing from pyproject.toml!"
46+
exit 1
47+
fi
48+
49+
echo "Checking if $version already exists..."
50+
version_commit="$(git rev-parse "$version" 2>/dev/null)"
51+
if [ ! -z "$version_commit" ] && [ "$version_commit" != "$version" ];
52+
then
53+
echo "Version $version already exist on commit $version_commit!"
54+
echo "Abandoning build..."
55+
echo "To complete this release update the version field in the pyproject.toml with an appropriate semantic version."
56+
exit 1
57+
else
58+
echo "version=$version" >> "$GITHUB_OUTPUT"
59+
exit 0
60+
fi
61+
62+
- name: Install Twine & Build
63+
shell: bash
64+
run: |
65+
python -m pip install --upgrade pip
66+
pip install twine build toml
67+
68+
- name: Create .pypirc
69+
shell: bash
70+
run: |
71+
touch ~/.pypirc
72+
echo "[distutils]" >> ~/.pypirc
73+
echo "index-servers =" >> ~/.pypirc
74+
echo " private-repository" >> ~/.pypirc
75+
echo "" >> ~/.pypirc
76+
echo "[private-repository]" >> ~/.pypirc
77+
echo "repository = $PYPI_REPOSITORY_URL" >> ~/.pypirc
78+
echo "username = __token__" >> ~/.pypirc
79+
echo "password = $GUARDRAILS_TOKEN" >> ~/.pypirc
80+
81+
- name: Build & Upload
82+
shell: bash
83+
run: |
84+
python -m build
85+
twine upload dist/* -u __token__ -p $GUARDRAILS_TOKEN -r private-repository
86+
87+
- name: Create .pypirc for PyPI.org
88+
shell: bash
89+
env:
90+
PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
91+
run: |
92+
echo "[distutils]" > ~/.pypirc
93+
echo "index-servers =" >> ~/.pypirc
94+
echo " pypi" >> ~/.pypirc
95+
echo "" >> ~/.pypirc
96+
echo "[pypi]" >> ~/.pypirc
97+
echo "repository = https://upload.pypi.org/legacy/" >> ~/.pypirc
98+
echo "username = __token__" >> ~/.pypirc
99+
echo "password = $PYPI_PASSWORD" >> ~/.pypirc
100+
101+
- name: Upload to PyPI.org
102+
shell: bash
103+
env:
104+
PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
105+
run: |
106+
twine upload dist/* -u __token__ -p $PYPI_PASSWORD -r pypi
107+
108+
- name: Tag
109+
id: tag
110+
continue-on-error: false
111+
run: |
112+
version="${{ steps.version.outputs.version }}"
113+
echo "Configuring github bot"
114+
git config user.name "github-actions[bot]"
115+
# Comes from https://api.github.com/users/github-actions%5Bbot%5D
116+
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
117+
echo "Creating github tag: $version"
118+
git tag "$version"
119+
echo "Pushing tags"
120+
git push --tags

.gitignore

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
build
2+
*.egg-info
3+
.venv
4+
.ruff_cache

.pre-commit-config.yaml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
repos:
2+
- repo: https://github.com/astral-sh/ruff-pre-commit
3+
# Ruff version.
4+
rev: v0.9.4
5+
hooks:
6+
# Run the linter.
7+
- id: ruff
8+
args: [ --fix ]
9+
# Run the formatter.
10+
- id: ruff-format

LICENSE

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
MIT License
2+
3+
Copyright (c) 2024 Guardrails AI
4+
5+
Permission is hereby granted, free of charge, to any person obtaining a copy
6+
of this software and associated documentation files (the "Software"), to deal
7+
in the Software without restriction, including without limitation the rights
8+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9+
copies of the Software, and to permit persons to whom the Software is
10+
furnished to do so, subject to the following conditions:
11+
12+
The above copyright notice and this permission notice shall be included in all
13+
copies or substantial portions of the Software.
14+
15+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21+
SOFTWARE.

Makefile

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
.PHONY: install install-dev lint lint-fix type qa test test-cov test-cov-ci
2+
# Installs production dependencies
3+
install:
4+
pip install .;
5+
6+
# Installs development dependencies
7+
install-dev:
8+
pip install ".[dev]";
9+
10+
lint:
11+
ruff check .
12+
ruff format .
13+
14+
lint-fix:
15+
ruff check . --fix
16+
ruff format .
17+
18+
type:
19+
pyright
20+
21+
qa:
22+
make install-dev
23+
make lint
24+
make type
25+
26+
test:
27+
python -m unittest discover -s tests --buffer --failfast
28+
29+
test-cov:
30+
coverage run -m unittest discover --start-directory tests --buffer --failfast
31+
coverage report -m
32+
33+
test-cov-ci:
34+
coverage run -m unittest discover --start-directory tests --buffer --failfast

README.md

Lines changed: 158 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,158 @@
1-
# snowglobe-telemetry-openinference
2-
Telemetry instrumentation for the snowglobe client using OpenInference semantics.
1+
# Snowglobe Telemetry Instrumentation for OpenInference
2+
3+
Instrument your Snowglobe connected app with OpenInference and start sending traces to popular OpenInference compatible sinks like Arize or Arize Phoenix.
4+
5+
## Installation
6+
7+
```
8+
pip install snowglobe-telemetry-openinference
9+
```
10+
11+
If using uv, set the `--prerelease=allow` flag
12+
```
13+
uv pip install --prerelease=allow snowglobe-telemetry-openinference
14+
```
15+
16+
17+
## Add the OpenInferenceInstrumentor to your agent file
18+
19+
Reminder: Each agent wrapper file resides in the root directory of your project, and is named after the agent (e.g. `My Agent Name` becomes `my_agent_name.py`).
20+
21+
```python
22+
from snowglobe.client import CompletionRequest, CompletionFunctionOutputs
23+
from openai import OpenAI
24+
import os
25+
26+
### Add these two lines to your agent file and watch context rich traces come in!
27+
from snowglobe.telemetry.openinference import OpenInferenceInstrumentor
28+
OpenInferenceInstrumentor().instrument()
29+
30+
31+
client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))
32+
33+
def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs:
34+
"""
35+
Process a scenario request from Snowglobe.
36+
37+
This function is called by the Snowglobe client to process requests. It should return a
38+
CompletionFunctionOutputs object with the response content.
39+
40+
Example CompletionRequest:
41+
CompletionRequest(
42+
messages=[
43+
SnowglobeMessage(role="user", content="Hello, how are you?", snowglobe_data=None),
44+
]
45+
)
46+
47+
Example CompletionFunctionOutputs:
48+
CompletionFunctionOutputs(response="This is a string response from your application")
49+
50+
Args:
51+
request (CompletionRequest): The request object containing the messages.
52+
53+
Returns:
54+
CompletionFunctionOutputs: The response object with the generated content.
55+
"""
56+
57+
# Process the request using the messages. Example:
58+
messages = request.to_openai_messages()
59+
response = client.chat.completions.create(
60+
model="gpt-4o-mini",
61+
messages=messages
62+
)
63+
return CompletionFunctionOutputs(response=response.choices[0].message.content)
64+
```
65+
66+
67+
68+
## Enhancing Snowglobe Connect SDK's Traces with OpenInference Integrations
69+
You can add more rich context to the traces the Snowglobe Connect SDK captures by installing additional OpenInference instrumentors and registering the appropriate tracer provider in your agent wrapper file.
70+
71+
The examples below show how to add OpenAI instrumentation for either Arize or Arize Phoenix in addition to Snowglobe's OpenInference instrumentation:
72+
73+
### Arize
74+
75+
Install the Arize OpenTelemetry package and the OpenAI-specific instrumentor.
76+
```sh
77+
pip install openinference-instrumentation-openai arize-otel
78+
```
79+
80+
Then register the tracer provider and use the OpenAI instrumentor in your agent file:
81+
```py
82+
import os
83+
from openai import OpenAI
84+
from snowglobe.client import CompletionRequest, CompletionFunctionOutputs
85+
from arize.otel import register
86+
87+
# Create the OpenAI client used by completion_fn below
client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))

# Setup OTel via our convenience function
88+
tracer_provider = register(
89+
space_id = "your-space-id", # in app space settings page
90+
api_key = "your-api-key", # in app space settings page
91+
project_name = "your-project-name", # name this to whatever you would like
92+
)
93+
94+
# Import the OpenAI instrumentor from OpenInference
95+
from openinference.instrumentation.openai import OpenAIInstrumentor
96+
97+
# Instrument OpenAI
98+
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)
99+
100+
# Import the OpenInference instrumentor from Snowglobe
101+
from snowglobe.telemetry.openinference import OpenInferenceInstrumentor
102+
103+
# Instrument the Snowglobe client
104+
OpenInferenceInstrumentor().instrument(tracer_provider=tracer_provider)
105+
106+
107+
def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs:
108+
messages = request.to_openai_messages()
109+
response = client.chat.completions.create(
110+
model="gpt-4o-mini",
111+
messages=messages
112+
)
113+
return CompletionFunctionOutputs(response=response.choices[0].message.content)
114+
```
115+
116+
117+
### Arize Phoenix
118+
119+
Install the Arize Phoenix OpenTelemetry package and the OpenAI-specific instrumentor.
120+
```sh
121+
pip install openinference-instrumentation-openai arize-phoenix-otel
122+
```
123+
124+
Then register the tracer provider and use the OpenAI instrumentor in your agent file:
125+
```py
126+
import os
127+
from openai import OpenAI
128+
from snowglobe.client import CompletionRequest, CompletionFunctionOutputs
129+
from phoenix.otel import register
130+
131+
# Create the OpenAI client used by completion_fn below
client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))

os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "http://localhost:6006"
132+
133+
# configure the Phoenix tracer
134+
tracer_provider = register(
135+
project_name="my-llm-app", # Default is 'default'
136+
)
137+
138+
# Import the OpenAI instrumentor from OpenInference
139+
from openinference.instrumentation.openai import OpenAIInstrumentor
140+
141+
# Instrument OpenAI
142+
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)
143+
144+
# Import the OpenInference instrumentor from Snowglobe
145+
from snowglobe.telemetry.openinference import OpenInferenceInstrumentor
146+
147+
# Instrument the Snowglobe client
148+
OpenInferenceInstrumentor().instrument(tracer_provider=tracer_provider)
149+
150+
151+
def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs:
152+
messages = request.to_openai_messages()
153+
response = client.chat.completions.create(
154+
model="gpt-4o-mini",
155+
messages=messages
156+
)
157+
return CompletionFunctionOutputs(response=response.choices[0].message.content)
158+
```

0 commit comments

Comments
 (0)