
Commit 80b1ee5

Merge pull request #1 from Azure-Samples/githubmodels

Add support for GitHub models

2 parents b59400b + e1ed4d0, commit 80b1ee5

15 files changed, +280 -82 lines

.devcontainer/devcontainer.json

Lines changed: 4 additions & 3 deletions
```diff
@@ -8,10 +8,11 @@
     "vscode": {
       "extensions": [
         "ms-python.python",
-        "ms-python.vscode-pylance",
-        "charliermarsh.ruff",
+        "ms-python.vscode-pylance",
+        "charliermarsh.ruff",
         "ms-azuretools.azure-dev",
-        "ms-azuretools.vscode-bicep"
+        "ms-azuretools.vscode-bicep",
+        "humao.rest-client"
       ],
       "python.defaultInterpreterPath": "/usr/local/bin/python",
       "[python]": {
```

.env.sample

Lines changed: 3 additions & 0 deletions
```diff
@@ -1,2 +1,5 @@
+OPENAI_HOST=azure # Options are azure or github
 AZURE_OPENAI_GPT_DEPLOYMENT=
 AZURE_OPENAI_SERVICE=
+AZURE_TENANT_ID=
+GITHUB_TOKEN=
```

README.md

Lines changed: 24 additions & 3 deletions
````diff
@@ -122,15 +122,36 @@ A related option is VS Code Dev Containers, which will open the project in your
 
 ## Running the Python examples
 
-1. If you're not already running in a Codespace or Dev Container, create a Python virtual environment.
+To run the samples, you'll either need to have already [deployed the Azure OpenAI account](#deployment) or use GitHub models.
 
-2. Install the requirements:
+1. Check that the `.env` file exists in the root of the project. If you [deployed an Azure OpenAI account](#deployment), it should have been created for you, and look like this:
+
+    ```shell
+    OPENAI_HOST=azure
+    AZURE_OPENAI_GPT_DEPLOYMENT=gpt-4o
+    AZURE_OPENAI_SERVICE=your-service-name
+    AZURE_TENANT_ID=your-tenant-id-1234
+    ```
+
+    If you're using GitHub models, create a `.env` file with the following content:
+
+    ```shell
+    OPENAI_HOST=github
+    GITHUB_TOKEN=
+    ```
+
+    You can create a GitHub token by following the [GitHub documentation](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token),
+    or open this project inside GitHub Codespaces where the token is already exposed as an environment variable.
+
+2. If you're not already running in a Codespace or Dev Container, create a Python virtual environment.
+
+3. Install the requirements:
 
     ```shell
    python -m pip install -r requirements.txt
    ```
 
-3. Run an example by running either `python example_file.py` or selecting the `Run` button on the opened file. Available examples:
+4. Run an example by running either `python example_file.py` or selecting the `Run` button on the opened file. Available examples:
 
 | Script filename | Description |
 |---------------------------|-----------------------------------------------------------------------------|
````
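Before running the scripts, a quick way to confirm the `.env` file is being picked up is a few lines of Python that use the same `python-dotenv` loading the samples rely on. This is an illustrative sketch, not a file in the repo:

```python
# check_env.py (hypothetical helper, not part of this commit)
import os

from dotenv import load_dotenv

load_dotenv()  # reads .env from the project root / current directory

host = os.getenv("OPENAI_HOST", "azure")  # the samples default to "azure"
print(f"OPENAI_HOST={host}")
if host == "github" and not os.getenv("GITHUB_TOKEN"):
    print("GITHUB_TOKEN is empty; set it in .env or open the repo in GitHub Codespaces.")
if host == "azure" and not (os.getenv("AZURE_OPENAI_SERVICE") and os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")):
    print("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT must be set for the Azure host.")
```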

extract_github_issue.py

Lines changed: 23 additions & 12 deletions
```diff
@@ -12,18 +12,29 @@
 logging.basicConfig(level=logging.WARNING)
 load_dotenv()
 
-# Configure Azure OpenAI
-if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
-    logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT environment variables are empty. See README.")
-    exit(1)
-credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
-token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
-client = openai.AzureOpenAI(
-    api_version="2024-08-01-preview",
-    azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
-    azure_ad_token_provider=token_provider,
-)
-model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+
+if os.getenv("OPENAI_HOST", "azure") == "azure":
+    if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
+        logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT env variables are empty. See README.")
+        exit(1)
+    credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
+    token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
+    client = openai.AzureOpenAI(
+        api_version="2024-08-01-preview",
+        azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
+        azure_ad_token_provider=token_provider,
+    )
+    model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+else:
+    if not os.getenv("GITHUB_TOKEN"):
+        logging.warning("GITHUB_TOKEN env variable is empty. See README.")
+        exit(1)
+    client = openai.OpenAI(
+        base_url="https://models.inference.ai.azure.com",
+        api_key=os.environ["GITHUB_TOKEN"],
+        # Specify the API version to use the Structured Outputs feature
+        default_query={"api-version": "2024-08-01-preview"})
+    model_name = "gpt-4o"
 
 
 # Define models for Structured Outputs
```
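Downstream, the scripts use `client` and `model_name` the same way for either host. The sketch below shows the Structured Outputs call pattern the later files in this commit switch to (`model=model_name`); the `Issue` Pydantic model and `issue_text` variable are illustrative placeholders, not the actual definitions in extract_github_issue.py:

```python
# Illustrative sketch of the Structured Outputs call both hosts share.
# `Issue` and `issue_text` are placeholders, not this script's real definitions.
from pydantic import BaseModel


class Issue(BaseModel):
    title: str
    labels: list[str]


completion = client.beta.chat.completions.parse(
    model=model_name,  # Azure deployment name, or "gpt-4o" for GitHub models
    messages=[
        {"role": "system", "content": "Extract the issue details"},
        {"role": "user", "content": issue_text},
    ],
    response_format=Issue,  # Pydantic model describing the expected output shape
)
print(completion.choices[0].message.parsed)
```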

extract_github_repo.py

Lines changed: 23 additions & 12 deletions
```diff
@@ -13,17 +13,28 @@
 logging.basicConfig(level=logging.WARNING)
 load_dotenv()
 
-# Configure Azure OpenAI
-if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
-    logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT environment variables are empty. See README.")
-    exit(1)
-credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
-token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
-client = openai.AzureOpenAI(
-    api_version="2024-08-01-preview",
-    azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
-    azure_ad_token_provider=token_provider,
-)
+if os.getenv("OPENAI_HOST", "azure") == "azure":
+    if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
+        logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT env variables are empty. See README.")
+        exit(1)
+    credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
+    token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
+    client = openai.AzureOpenAI(
+        api_version="2024-08-01-preview",
+        azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
+        azure_ad_token_provider=token_provider,
+    )
+    model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+else:
+    if not os.getenv("GITHUB_TOKEN"):
+        logging.warning("GITHUB_TOKEN env variable is empty. See README.")
+        exit(1)
+    client = openai.OpenAI(
+        base_url="https://models.inference.ai.azure.com",
+        api_key=os.environ["GITHUB_TOKEN"],
+        # Specify the API version to use the Structured Outputs feature
+        default_query={"api-version": "2024-08-01-preview"})
+    model_name = "gpt-4o"
 
 
 # Define models for Structured Outputs
@@ -69,7 +80,7 @@ class RepoOverview(BaseModel):
 
 # Send request to GPT model to extract using Structured Outputs
 completion = client.beta.chat.completions.parse(
-    model=os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"),
+    model=model_name,
     messages=[
         {
             "role": "system",
```

extract_image_graph.py

Lines changed: 23 additions & 13 deletions
```diff
@@ -11,18 +11,28 @@
 logging.basicConfig(level=logging.WARNING)
 load_dotenv()
 
-# Configure Azure OpenAI
-if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
-    logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT environment variables are empty. See README.")
-    exit(1)
-credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
-token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
-client = openai.AzureOpenAI(
-    api_version="2024-08-01-preview",
-    azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
-    azure_ad_token_provider=token_provider,
-)
-model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+if os.getenv("OPENAI_HOST", "azure") == "azure":
+    if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
+        logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT env variables are empty. See README.")
+        exit(1)
+    credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
+    token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
+    client = openai.AzureOpenAI(
+        api_version="2024-08-01-preview",
+        azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
+        azure_ad_token_provider=token_provider,
+    )
+    model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+else:
+    if not os.getenv("GITHUB_TOKEN"):
+        logging.warning("GITHUB_TOKEN env variable is empty. See README.")
+        exit(1)
+    client = openai.OpenAI(
+        base_url="https://models.inference.ai.azure.com",
+        api_key=os.environ["GITHUB_TOKEN"],
+        # Specify the API version to use the Structured Outputs feature
+        default_query={"api-version": "2024-08-01-preview"})
+    model_name = "gpt-4o"
 
 
 # Define models for Structured Outputs
@@ -46,7 +56,7 @@ def open_image_as_base64(filename):
 
 # Send request to GPT model to extract using Structured Outputs
 completion = client.beta.chat.completions.parse(
-    model=os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"),
+    model=model_name,
     messages=[
         {"role": "system", "content": "Extract the information from the graph"},
         {
```

extract_image_table.py

Lines changed: 23 additions & 12 deletions
```diff
@@ -11,17 +11,28 @@
 logging.basicConfig(level=logging.WARNING)
 load_dotenv()
 
-# Configure Azure OpenAI
-if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
-    logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT environment variables are empty. See README.")
-    exit(1)
-credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
-token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
-client = openai.AzureOpenAI(
-    api_version="2024-08-01-preview",
-    azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
-    azure_ad_token_provider=token_provider,
-)
+if os.getenv("OPENAI_HOST", "azure") == "azure":
+    if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
+        logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT env variables are empty. See README.")
+        exit(1)
+    credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
+    token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
+    client = openai.AzureOpenAI(
+        api_version="2024-08-01-preview",
+        azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
+        azure_ad_token_provider=token_provider,
+    )
+    model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+else:
+    if not os.getenv("GITHUB_TOKEN"):
+        logging.warning("GITHUB_TOKEN env variable is empty. See README.")
+        exit(1)
+    client = openai.OpenAI(
+        base_url="https://models.inference.ai.azure.com",
+        api_key=os.environ["GITHUB_TOKEN"],
+        # Specify the API version to use the Structured Outputs feature
+        default_query={"api-version": "2024-08-01-preview"})
+    model_name = "gpt-4o"
 
 
 # Define models for Structured Outputs
@@ -53,7 +64,7 @@ def open_image_as_base64(filename):
 
 # Send request to GPT model to extract using Structured Outputs
 completion = client.beta.chat.completions.parse(
-    model=os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"),
+    model=model_name,
     messages=[
         {"role": "system", "content": "Extract the information from the table"},
         {
```

extract_pdf_receipt.py

Lines changed: 23 additions & 13 deletions
```diff
@@ -11,18 +11,28 @@
 logging.basicConfig(level=logging.WARNING)
 load_dotenv()
 
-# Configure Azure OpenAI
-if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
-    logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT environment variables are empty. See README.")
-    exit(1)
-credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
-token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
-client = openai.AzureOpenAI(
-    api_version="2024-08-01-preview",
-    azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
-    azure_ad_token_provider=token_provider,
-)
-model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+if os.getenv("OPENAI_HOST", "azure") == "azure":
+    if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
+        logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT env variables are empty. See README.")
+        exit(1)
+    credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
+    token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
+    client = openai.AzureOpenAI(
+        api_version="2024-08-01-preview",
+        azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
+        azure_ad_token_provider=token_provider,
+    )
+    model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+else:
+    if not os.getenv("GITHUB_TOKEN"):
+        logging.warning("GITHUB_TOKEN env variable is empty. See README.")
+        exit(1)
+    client = openai.OpenAI(
+        base_url="https://models.inference.ai.azure.com",
+        api_key=os.environ["GITHUB_TOKEN"],
+        # Specify the API version to use the Structured Outputs feature
+        default_query={"api-version": "2024-08-01-preview"})
+    model_name = "gpt-4o"
 
 
 # Define models for Structured Outputs
@@ -45,7 +55,7 @@ class Receipt(BaseModel):
 
 # Send request to GPT model to extract using Structured Outputs
 completion = client.beta.chat.completions.parse(
-    model=os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"),
+    model=model_name,
     messages=[
         {"role": "system", "content": "Extract the information from the blog post"},
         {"role": "user", "content": md_text},
```

extract_webpage.py

Lines changed: 23 additions & 14 deletions
```diff
@@ -12,19 +12,28 @@
 logging.basicConfig(level=logging.WARNING)
 load_dotenv()
 
-# Configure Azure OpenAI
-if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
-    logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT environment variables are empty. See README.")
-    exit(1)
-credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
-token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
-client = openai.AzureOpenAI(
-    api_version="2024-08-01-preview",
-    azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
-    azure_ad_token_provider=token_provider,
-)
-model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
-
+if os.getenv("OPENAI_HOST", "azure") == "azure":
+    if not os.getenv("AZURE_OPENAI_SERVICE") or not os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"):
+        logging.warning("AZURE_OPENAI_SERVICE and AZURE_OPENAI_GPT_DEPLOYMENT env variables are empty. See README.")
+        exit(1)
+    credential = azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"))
+    token_provider = azure.identity.get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default")
+    client = openai.AzureOpenAI(
+        api_version="2024-08-01-preview",
+        azure_endpoint=f"https://{os.getenv('AZURE_OPENAI_SERVICE')}.openai.azure.com",
+        azure_ad_token_provider=token_provider,
+    )
+    model_name = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT")
+else:
+    if not os.getenv("GITHUB_TOKEN"):
+        logging.warning("GITHUB_TOKEN env variable is empty. See README.")
+        exit(1)
+    client = openai.OpenAI(
+        base_url="https://models.inference.ai.azure.com",
+        api_key=os.environ["GITHUB_TOKEN"],
+        # Specify the API version to use the Structured Outputs feature
+        default_query={"api-version": "2024-08-01-preview"})
+    model_name = "gpt-4o"
 
 # Define models for Structured Outputs
 class BlogPost(BaseModel):
@@ -46,7 +55,7 @@ class BlogPost(BaseModel):
 
 # Send request to GPT model to extract using Structured Outputs
 completion = client.beta.chat.completions.parse(
-    model=os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT"),
+    model=model_name,
     messages=[
         {"role": "system", "content": "Extract the information from the blog post"},
         {"role": "user", "content": f"{post_title}\n{post_contents}"},
```

http/.env.azure

Lines changed: 3 additions & 0 deletions
```diff
@@ -0,0 +1,3 @@
+AZURE_OPENAI_SERVICE=
+AZURE_OPENAI_GPT_DEPLOYMENT=
+AZURE_OPENAI_TOKEN=
```
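The new `http/.env.azure` file suggests the REST Client requests (enabled by the `humao.rest-client` extension added to the dev container) authenticate with a bearer token. One way to produce a value for `AZURE_OPENAI_TOKEN` could be to reuse the credential and scope the Python samples already use; this is a sketch under that assumption, not something defined in this commit:

```python
# Sketch: print a bearer token to paste into http/.env.azure as AZURE_OPENAI_TOKEN.
# Assumes the same Azure Developer CLI login the Python samples rely on.
import azure.identity

credential = azure.identity.AzureDeveloperCliCredential()
token = credential.get_token("https://cognitiveservices.azure.com/.default")
print(token.token)  # tokens are short-lived, so regenerate before each session
```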
