Skip to content
This repository was archived by the owner on Aug 14, 2025. It is now read-only.

Commit 7c9f616

Browse files
committed
feat: make custom code changes
1 parent (0cdc405) · commit 7c9f616

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

64 files changed: +3,859 / −4 lines

src/llama_stack_client/__init__.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,12 @@
3939
from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
4040
from ._utils._logs import setup_logging as _setup_logging
4141

42+
from .lib.agents.agent import Agent
43+
from .lib.agents.event_logger import EventLogger as AgentEventLogger
44+
from .lib.inference.event_logger import EventLogger as InferenceEventLogger
45+
from .types.agents.turn_create_params import Document
46+
from .types.shared_params.document import Document as RAGDocument
47+
4248
__all__ = [
4349
"types",
4450
"__version__",

src/llama_stack_client/_client.py

Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

33
from __future__ import annotations
4+
import json
45

56
import os
67
from typing import Any, Union, Mapping
@@ -126,6 +127,7 @@ def __init__(
126127
# outlining your use-case to help us decide if it should be
127128
# part of our public interface in the future.
128129
_strict_response_validation: bool = False,
130+
provider_data: Mapping[str, Any] | None = None,
129131
) -> None:
130132
"""Construct a new synchronous LlamaStackClient client instance.
131133
@@ -140,13 +142,18 @@ def __init__(
140142
if base_url is None:
141143
base_url = f"http://any-hosted-llama-stack.com"
142144

145+
custom_headers = default_headers or {}
146+
custom_headers["X-LlamaStack-Client-Version"] = __version__
147+
if provider_data is not None:
148+
custom_headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)
149+
143150
super().__init__(
144151
version=__version__,
145152
base_url=base_url,
146153
max_retries=max_retries,
147154
timeout=timeout,
148155
http_client=http_client,
149-
custom_headers=default_headers,
156+
custom_headers=custom_headers,
150157
custom_query=default_query,
151158
_strict_response_validation=_strict_response_validation,
152159
)
@@ -344,6 +351,7 @@ def __init__(
344351
# outlining your use-case to help us decide if it should be
345352
# part of our public interface in the future.
346353
_strict_response_validation: bool = False,
354+
provider_data: Mapping[str, Any] | None = None,
347355
) -> None:
348356
"""Construct a new async AsyncLlamaStackClient client instance.
349357
@@ -358,13 +366,18 @@ def __init__(
358366
if base_url is None:
359367
base_url = f"http://any-hosted-llama-stack.com"
360368

369+
custom_headers = default_headers or {}
370+
custom_headers["X-LlamaStack-Client-Version"] = __version__
371+
if provider_data is not None:
372+
custom_headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)
373+
361374
super().__init__(
362375
version=__version__,
363376
base_url=base_url,
364377
max_retries=max_retries,
365378
timeout=timeout,
366379
http_client=http_client,
367-
custom_headers=default_headers,
380+
custom_headers=custom_headers,
368381
custom_query=default_query,
369382
_strict_response_validation=_strict_response_validation,
370383
)

src/llama_stack_client/_utils/_logs.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import os
22
import logging
3+
from rich.logging import RichHandler
34

45
logger: logging.Logger = logging.getLogger("llama_stack_client")
56
httpx_logger: logging.Logger = logging.getLogger("httpx")
@@ -10,6 +11,7 @@ def _basic_config() -> None:
1011
logging.basicConfig(
1112
format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
1213
datefmt="%Y-%m-%d %H:%M:%S",
14+
handlers=[RichHandler(rich_tracebacks=True)],
1315
)
1416

1517

src/llama_stack_client/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

33
__title__ = "llama_stack_client"
4-
__version__ = "0.1.0-alpha.2" # x-release-please-version
4+
__version__ = "0.2.12"

src/llama_stack_client/lib/.keep

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
File generated from our OpenAPI spec by Stainless.
22

33
This directory can be used to store custom files to expand the SDK.
4-
It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
4+
It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# Copyright (c) Meta Platforms, Inc. and affiliates.
2+
# All rights reserved.
3+
#
4+
# This source code is licensed under the terms described in the LICENSE file in
5+
# the root directory of this source tree.
6+
7+
from .tools.mcp_oauth import get_oauth_token_for_mcp_server
8+
9+
__all__ = ["get_oauth_token_for_mcp_server"]
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# Copyright (c) Meta Platforms, Inc. and affiliates.
2+
# All rights reserved.
3+
#
4+
# This source code is licensed under the terms described in the LICENSE file in
5+
# the root directory of this source tree.

0 commit comments

Comments (0)