From 3a6290970b9e7c9fe026be5dc56c9d43b2650775 Mon Sep 17 00:00:00 2001 From: Thakee Nathees Date: Fri, 10 Oct 2025 12:32:27 +0800 Subject: [PATCH 1/2] Revert "Merge pull request #3039 from jaseci-labs/lazyload_byllm" This reverts commit c1af5621e11fc100d1436c4de833e8427c0ee0e7, reversing changes made to ef1f120c9147ac10bbfa60e472b0c921623669ed. --- docs/docs/communityhub/release_notes.md | 3 --- docs/docs/jac_book/chapter_5.md | 18 +++++++++--------- docs/docs/jac_book/chapter_9.md | 4 ++-- docs/docs/learn/beginners_guide_to_jac.md | 2 +- .../aider-genius-lite/genius_lite.jac | 2 +- .../examples/littleX/src/byllm_example.jac | 2 +- .../examples/littleX/src/example_game.jac | 2 +- .../mtp_examples/fantasy_trading_game.md | 2 +- .../learn/examples/mtp_examples/rpg_game.md | 2 +- docs/docs/learn/jac-byllm/litellm_proxy.md | 2 +- docs/docs/learn/jac-byllm/multimodality.md | 4 ++-- .../learn/jac-byllm/python_integration.md | 9 ++++----- docs/docs/learn/jac-byllm/quickstart.md | 4 ++-- docs/docs/learn/jac-byllm/usage.md | 14 +++++++------- docs/docs/learn/jac-byllm/with_llm.md | 12 ++++++------ docs/docs/learn/tour.md | 2 +- jac-byllm/README.md | 4 ++-- jac-byllm/byllm/__init__.py | 11 ++++++++++- jac-byllm/byllm/lib.py | 10 ---------- jac-byllm/byllm/plugin.py | 14 +++----------- .../aider-genius-lite/genius_lite.jac | 2 +- .../core_examples/level_genarator.jac | 2 +- .../core_examples/personality_finder.jac | 2 +- .../examples/microbenchmarks/essay_review.jac | 2 +- .../microbenchmarks/expert_answer.jac | 2 +- .../microbenchmarks/grammar_checker.jac | 2 +- .../examples/microbenchmarks/joke_gen.jac | 2 +- .../examples/microbenchmarks/odd_word_out.jac | 2 +- .../examples/microbenchmarks/text_to_type.jac | 2 +- .../examples/microbenchmarks/translator.jac | 2 +- .../examples/tool_calling/debate_agent.jac | 2 +- .../tool_calling/fantasy_trading_game.jac | 2 +- .../tool_calling/marketing_agency.jac | 2 +- .../examples/tool_calling/wikipedia_react.jac | 2 +- jac-byllm/examples/vision/math_solver.jac | 2 +- jac-byllm/examples/vision/mugen.jac | 2 +- .../examples/vision/personality_finder.jac | 2 +- .../examples/vision/receipt_analyzer.jac | 2 +- jac-byllm/tests/fixtures/by_expr.jac | 2 +- jac-byllm/tests/fixtures/enum_no_value.jac | 2 +- jac-byllm/tests/fixtures/image_test.jac | 2 +- .../tests/fixtures/llm_mail_summerize.jac | 2 +- jac-byllm/tests/fixtures/llm_semstrings.jac | 2 +- jac-byllm/tests/fixtures/method_incl_ctx.jac | 2 +- jac-byllm/tests/fixtures/method_tool.jac | 2 +- jac-byllm/tests/fixtures/python_lib_mode.py | 19 +++++++++---------- jac-byllm/tests/fixtures/streaming_output.jac | 2 +- .../tests/fixtures/webp_support_test.jac | 2 +- .../tests/fixtures/with_llm_function.jac | 2 +- jac-byllm/tests/fixtures/with_llm_image.jac | 2 +- jac-byllm/tests/fixtures/with_llm_lower.jac | 2 +- jac-byllm/tests/fixtures/with_llm_method.jac | 2 +- jac-byllm/tests/fixtures/with_llm_type.jac | 2 +- jac-byllm/tests/fixtures/with_llm_video.jac | 2 +- jac-byllm/tests/test_schema.jac | 2 +- jac/examples/reference/semstrings.jac | 2 +- jac/examples/reference/semstrings.py | 2 +- .../jac_impl_6/utils/level_manager.jac | 2 +- jac/examples/rpg_game/lib_mode/map.py | 2 +- 59 files changed, 102 insertions(+), 116 deletions(-) delete mode 100644 jac-byllm/byllm/lib.py diff --git a/docs/docs/communityhub/release_notes.md b/docs/docs/communityhub/release_notes.md index 00c1b12ea8..0bebcb5371 100644 --- a/docs/docs/communityhub/release_notes.md +++ b/docs/docs/communityhub/release_notes.md @@ -5,9 +5,6 @@ This document 
provides a summary of new features, improvements, and bug fixes in ## jaclang 0.8.10 / jac-cloud 0.2.10 / byllm 0.4.5 (Unreleased) -- **byLLM Lazy Loading**: Refactored byLLM to support lazy loading by moving all exports to `byllm.lib` module. Users should now import from `byllm.lib` in Python (e.g., `from byllm.lib import Model, by`) and use `import from byllm.lib { Model }` in Jac code. This improves startup performance and reduces unnecessary module loading. -- **NonGPT Fallback for byLLM**: Implemented automatic fallback when byLLM is not installed. When code attempts to import `byllm`, the system will provide mock implementations that return random using the `NonGPT.random_value_for_type()` utility. - ## jaclang 0.8.9 / jac-cloud 0.2.9 / byllm 0.4.4 (Latest Release) - **Typed Context Blocks (OSP)**: Fully implemented typed context blocks (`-> NodeType { }` and `-> WalkerType { }`) for Object-Spatial Programming, enabling conditional code execution based on runtime types. diff --git a/docs/docs/jac_book/chapter_5.md b/docs/docs/jac_book/chapter_5.md index 3c1d6583f9..1a6351dfb0 100644 --- a/docs/docs/jac_book/chapter_5.md +++ b/docs/docs/jac_book/chapter_5.md @@ -82,7 +82,7 @@ pip install byllm Next we replace the OpenAI import with that of the byLLM package ```jac -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4.1-mini"); ```
@@ -94,7 +94,7 @@ def write_poetry(topic: str) -> str by llm(); Finally, lets put it all together and run the Jac code: ```jac # mt_poem.jac - Simple AI integration -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4.1-mini"); @@ -137,7 +137,7 @@ Next we'll make use of MLTLLM's `Image` function to handle image inputs. This fu ```jac # image_captioning.jac - Simple Image Captioning Tool -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob llm = Model(model_name="gpt-4o-mini"); @@ -169,7 +169,7 @@ byLLM supports various AI models through the unified `Model` interface. For exam ```jac # basic_setup.jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } # Configure different models glob text_model = Model(model_name="gpt-4o"); @@ -182,7 +182,7 @@ glob gemini_model = Model(model_name="gemini-2.0-flash"); The `Model` class allows you to configure various parameters for your AI model, such as temperature, max tokens, and more. Here's an example of how to set up a model with custom parameters: ```jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } # Configure model with custom parameters glob creative_model = Model( @@ -210,7 +210,7 @@ Below is a breakdown of the parameters you can configure when creating a `Model` Here we have a simple example of how to use the `Model` class to create a model instance with custom parameters: ```jac # model_config.jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } # Configure model with custom parameters glob creative_model = Model( @@ -252,7 +252,7 @@ Let's progressively build an image captioning tool that demonstrates byLLM's cap ```jac # image_captioner.jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob vision_llm = Model(model_name="gpt-4o-mini"); @@ -307,7 +307,7 @@ the stylish outfit of the dog contribute to a fun and lighthearted atmosphere. ```jac # enhanced_captioner.jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob vision_llm = Model(model_name="gpt-4.1-mini"); @@ -363,7 +363,7 @@ AI applications require robust error handling and testing strategies. ```jac # robust_ai.jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob reliable_llm = Model(model_name="gpt-4o", max_tries=3); diff --git a/docs/docs/jac_book/chapter_9.md b/docs/docs/jac_book/chapter_9.md index 11ff1090f1..17ce7b6983 100644 --- a/docs/docs/jac_book/chapter_9.md +++ b/docs/docs/jac_book/chapter_9.md @@ -197,7 +197,7 @@ Using the byLLM plugin, we can define a function that sends the agent's state to Here’s the code for our mood function: ```jac -import from byllm.lib { Model } +import from byllm { Model } # Configure the LLM glob npc_model = Model(model_name="gpt-4.1-mini"); @@ -242,7 +242,7 @@ The `NPCWalker` first inherits the behavior of `StateAgent` (which collects cont Finally, we can compose everything in a single entry point: ```jac -import from byllm.lib { Model } +import from byllm { Model } # Configure different models glob npc_model = Model(model_name="gpt-4.1-mini"); diff --git a/docs/docs/learn/beginners_guide_to_jac.md b/docs/docs/learn/beginners_guide_to_jac.md index a3469e6491..e44d441f83 100644 --- a/docs/docs/learn/beginners_guide_to_jac.md +++ b/docs/docs/learn/beginners_guide_to_jac.md @@ -2678,7 +2678,7 @@ Now that you have the foundation, here are advanced Jac features to explore:
```jac - import from byllm.lib { Model } + import from byllm { Model } glob llm = Model(model="gpt-4"); diff --git a/docs/docs/learn/examples/agentic_ai/aider-genius-lite/genius_lite.jac b/docs/docs/learn/examples/agentic_ai/aider-genius-lite/genius_lite.jac index 9f5a6ed94e..21a5de7736 100644 --- a/docs/docs/learn/examples/agentic_ai/aider-genius-lite/genius_lite.jac +++ b/docs/docs/learn/examples/agentic_ai/aider-genius-lite/genius_lite.jac @@ -1,5 +1,5 @@ -import from byllm.lib { Model } +import from byllm { Model } import from pathlib { Path } glob llm = Model(model_name="gpt-4o-mini"); diff --git a/docs/docs/learn/examples/littleX/src/byllm_example.jac b/docs/docs/learn/examples/littleX/src/byllm_example.jac index b95c5675e1..6686c0b07e 100644 --- a/docs/docs/learn/examples/littleX/src/byllm_example.jac +++ b/docs/docs/learn/examples/littleX/src/byllm_example.jac @@ -1,4 +1,4 @@ -import from byllm.lib {Model} +import from byllm {Model} glob llm = Model(host="http://127.0.0.1:11434", model_name="ollama/llama3.2:1b"); diff --git a/docs/docs/learn/examples/littleX/src/example_game.jac b/docs/docs/learn/examples/littleX/src/example_game.jac index 455d446a07..c4bd40a54e 100644 --- a/docs/docs/learn/examples/littleX/src/example_game.jac +++ b/docs/docs/learn/examples/littleX/src/example_game.jac @@ -1,4 +1,4 @@ -import from byllm.lib {Model} +import from byllm {Model} glob llm = Model(model_name="gpt-4o"); diff --git a/docs/docs/learn/examples/mtp_examples/fantasy_trading_game.md b/docs/docs/learn/examples/mtp_examples/fantasy_trading_game.md index 7011843b10..0319b9734c 100644 --- a/docs/docs/learn/examples/mtp_examples/fantasy_trading_game.md +++ b/docs/docs/learn/examples/mtp_examples/fantasy_trading_game.md @@ -59,7 +59,7 @@ obj Chat { Configure the LLM for AI operations: ```jac -import from byllm.lib {Model} +import from byllm {Model} glob llm = Model(model_name="gpt-4o"); ``` diff --git a/docs/docs/learn/examples/mtp_examples/rpg_game.md b/docs/docs/learn/examples/mtp_examples/rpg_game.md index 99bcfc50f9..43471155fe 100644 --- a/docs/docs/learn/examples/mtp_examples/rpg_game.md +++ b/docs/docs/learn/examples/mtp_examples/rpg_game.md @@ -114,7 +114,7 @@ We’ll connect to an LLM (GPT-4o here) and define AI-powered methods for genera At the top of `level_manager.jac`, import the model: ```jac -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o", verbose=True); ``` diff --git a/docs/docs/learn/jac-byllm/litellm_proxy.md b/docs/docs/learn/jac-byllm/litellm_proxy.md index 918a1d2020..da06c81946 100644 --- a/docs/docs/learn/jac-byllm/litellm_proxy.md +++ b/docs/docs/learn/jac-byllm/litellm_proxy.md @@ -11,7 +11,7 @@ Reference: [https://docs.litellm.ai/docs/proxy/deploy](https://docs.litellm.ai/d Once The proxy server is setted up and running, you can connect to it by simply passing the URL of the proxy server to the byLLM model with the parameter `proxy_url`: ```python -from byllm.lib import Model +from byllm import Model llm = Model( model_name="gpt-4o", # The model name to be used diff --git a/docs/docs/learn/jac-byllm/multimodality.md b/docs/docs/learn/jac-byllm/multimodality.md index 59d2b551a6..a3392e81a5 100644 --- a/docs/docs/learn/jac-byllm/multimodality.md +++ b/docs/docs/learn/jac-byllm/multimodality.md @@ -13,7 +13,7 @@ pip install byllm[video] byLLM supports image inputs through the `Image` format. 
Images can be provided as input to byLLM functions or methods: ```jac -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob llm = Model(model_name="gpt-4o"); @@ -57,7 +57,7 @@ In this example, an image of a person is provided as input to the `get_person_in byLLM supports video inputs through the `Video` format. Videos can be provided as input to byLLM functions or methods: ```jac -import from byllm.lib { Model, Video } +import from byllm { Model, Video } glob llm = Model(model_name="gpt-4o"); diff --git a/docs/docs/learn/jac-byllm/python_integration.md b/docs/docs/learn/jac-byllm/python_integration.md index 98b96f5f0b..6259c57faf 100644 --- a/docs/docs/learn/jac-byllm/python_integration.md +++ b/docs/docs/learn/jac-byllm/python_integration.md @@ -15,7 +15,7 @@ byLLM functionality is accessed by importing the `byllm` module and using the `b ```python linenums="1" import jaclang from dataclasses import dataclass -from byllm.lib import Model, Image, by +from byllm import Model, Image, by llm = Model(model_name="gpt-4o") @@ -43,7 +43,7 @@ print(f"Name: {person.full_name}, Description: {person.description}, Year of Bir In Jaclang, hyper-parameters are set by passing them to the LLM model: ```jac linenums="1" -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o") @@ -56,7 +56,7 @@ In Python, hyper-parameters are passed as follows: ```python linenums="1" import jaclang -from byllm.lib import Model, by +from byllm import Model, by llm = Model(model_name="gpt-4o") @@ -70,7 +70,7 @@ Python functions can be used as tools in byLLM. Functions defined in Python are ```python linenums="1" import jaclang -from byllm.lib import Model +from byllm import Model llm = Model(model_name="gpt-4o") @@ -98,7 +98,6 @@ Using `sem` functionality in python is a bit diferent as the attachment is done ```python from jaclang import JacMachineInterface as Jac -from byllm.lib import Model, by @Jac.sem('', { 'name' : '', diff --git a/docs/docs/learn/jac-byllm/quickstart.md b/docs/docs/learn/jac-byllm/quickstart.md index 8367504ce6..df695be89a 100644 --- a/docs/docs/learn/jac-byllm/quickstart.md +++ b/docs/docs/learn/jac-byllm/quickstart.md @@ -44,7 +44,7 @@ The `by` keyword abstraction enables functions to process inputs of any type and #### Step 1: Configure LLM Model ```jac linenums="1" -import from byllm.lib {Model} +import from byllm {Model} glob llm = Model(model_name="gemini/gemini-2.0-flash"); ``` @@ -77,7 +77,7 @@ As byLLM is a python package, it can be natively used in jac. 
The following code ```python linenums="1" import jaclang -from byllm.lib import Model, by +from byllm import Model, by from enum import Enum llm = Model(model_name="gemini/gemini-2.0-flash") diff --git a/docs/docs/learn/jac-byllm/usage.md b/docs/docs/learn/jac-byllm/usage.md index 4a3131b1d2..8b68ee6fea 100644 --- a/docs/docs/learn/jac-byllm/usage.md +++ b/docs/docs/learn/jac-byllm/usage.md @@ -8,31 +8,31 @@ byLLM uses [LiteLLM](https://docs.litellm.ai/docs) to provide integration with a === "OpenAI" ```jac linenums="1" - import from byllm.lib {Model} + import from byllm {Model} glob llm = Model(model_name = "gpt-4o") ``` === "Gemini" ```jac linenums="1" - import from byllm.lib {Model} + import from byllm {Model} glob llm = Model(model_name = "gemini/gemini-2.0-flash") ``` === "Anthropic" ```jac linenums="1" - import from byllm.lib {Model} + import from byllm {Model} glob llm = Model(model_name = "claude-3-5-sonnet-20240620") ``` === "Ollama" ```jac linenums="1" - import from byllm.lib {Model} + import from byllm {Model} glob llm = Model(model_name = "ollama/llama3:70b") ``` === "HuggingFace Models" ```jac linenums="1" - import from byllm.lib {Model} + import from byllm {Model} glob llm = Model(model_name = "huggingface/meta-llama/Llama-3.3-70B-Instruct") ``` @@ -270,7 +270,7 @@ In this example: The ReAct (Reasoning and Acting) method enables agentic behavior by allowing functions to reason about problems and use external tools. Functions can be made agentic by adding the `by llm(tools=[...])` declaration. ```jac linenums="1" -import from byllm.lib { Model } +import from byllm { Model } import from datetime { datetime } glob llm = Model(model_name="gpt-4o"); @@ -304,7 +304,7 @@ The streaming feature enables real-time token reception from LLM functions, usef Set `stream=True` in the invoke parameters to enable streaming: ```jac linenums="1" -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o-mini"); diff --git a/docs/docs/learn/jac-byllm/with_llm.md b/docs/docs/learn/jac-byllm/with_llm.md index 09dce5bb0d..15f73bcbc4 100644 --- a/docs/docs/learn/jac-byllm/with_llm.md +++ b/docs/docs/learn/jac-byllm/with_llm.md @@ -43,7 +43,7 @@ pip install byllm Consider building an application that translates english to other languages using an LLM. 
This can be simply built as follows: === "Jac" ```jac linenums="1" - import from byllm.lib { Model } + import from byllm { Model } glob llm = Model(model_name="gpt-4o"); @@ -56,7 +56,7 @@ Consider building an application that translates english to other languages usin ``` === "python" ```python linenums="1" - from byllm.lib import Model, by + from byllm import Model, by llm = Model(model_name="gpt-4o") @@ -75,7 +75,7 @@ Consider a program that detects the personality type of a historical figure from === "Jac" ```jac linenums="1" - import from byllm.lib { Model } + import from byllm { Model } glob llm = Model(model_name="gemini/gemini-2.0-flash"); enum Personality { @@ -94,7 +94,7 @@ Consider a program that detects the personality type of a historical figure from ``` === "Python" ```python linenums="1" - from byllm.lib import Model, by + from byllm import Model, by from enum import Enum llm = Model(model_name="gemini/gemini-2.0-flash") @@ -119,7 +119,7 @@ Even if we are elimination prompt engineering entierly, we allow specific ways t === "Jac" ```jac linenums="1" - import from byllm.lib { Model } + import from byllm { Model } glob llm = Model(model_name="gemini/gemini-2.0-flash"); """Represents the personal record of a person""" @@ -140,7 +140,7 @@ Even if we are elimination prompt engineering entierly, we allow specific ways t ```python linenums="1" from jaclang import JacMachineInterface as Jac from dataclasses import dataclass - from byllm.lib import Model, by + from byllm import Model, by llm = Model(model_name="gemini/gemini-2.0-flash") @Jac.sem('', { 'name': 'Full name of the person', diff --git a/docs/docs/learn/tour.md b/docs/docs/learn/tour.md index bc4080de8d..d1df0353a6 100644 --- a/docs/docs/learn/tour.md +++ b/docs/docs/learn/tour.md @@ -35,7 +35,7 @@ This snippet natively imports Python packages `math` and `random` and runs ident Jac provides novel constructs for integrating LLMs into code. A function body can simply be replaced with a call to an LLM, removing the need for prompt engineering or extensive use of new libraries. ```jac -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o"); enum Personality { diff --git a/jac-byllm/README.md b/jac-byllm/README.md index 84ac193582..c2f05c63b2 100644 --- a/jac-byllm/README.md +++ b/jac-byllm/README.md @@ -26,7 +26,7 @@ pip install byllm Consider building an application that translates english to other languages using an LLM. This can be simply built as follows: ```python -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o"); @@ -45,7 +45,7 @@ This simple piece of code replaces traditional prompt engineering without introd Consider a program that detects the personality type of a historical figure from their name. This can eb built in a way that LLM picks from an enum and the output strictly adhere this type. 
```python -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gemini/gemini-2.0-flash"); enum Personality { diff --git a/jac-byllm/byllm/__init__.py b/jac-byllm/byllm/__init__.py index e51af1fb4f..a9e79b5b70 100644 --- a/jac-byllm/byllm/__init__.py +++ b/jac-byllm/byllm/__init__.py @@ -1 +1,10 @@ -"""byLLM Package - Lazy Loading.""" +"""byLLM Package.""" + +from byllm.llm import Model +from byllm.mtir import MTIR +from byllm.plugin import JacMachine +from byllm.types import Image, MockToolCall, Video + +by = JacMachine.by + +__all__ = ["by", "Image", "MockToolCall", "Model", "MTIR", "Video"] diff --git a/jac-byllm/byllm/lib.py b/jac-byllm/byllm/lib.py deleted file mode 100644 index a9e79b5b70..0000000000 --- a/jac-byllm/byllm/lib.py +++ /dev/null @@ -1,10 +0,0 @@ -"""byLLM Package.""" - -from byllm.llm import Model -from byllm.mtir import MTIR -from byllm.plugin import JacMachine -from byllm.types import Image, MockToolCall, Video - -by = JacMachine.by - -__all__ = ["by", "Image", "MockToolCall", "Model", "MTIR", "Video"] diff --git a/jac-byllm/byllm/plugin.py b/jac-byllm/byllm/plugin.py index bd9fb02733..7023a4a091 100644 --- a/jac-byllm/byllm/plugin.py +++ b/jac-byllm/byllm/plugin.py @@ -1,16 +1,12 @@ """Plugin for Jac's with_llm feature.""" -from __future__ import annotations +from typing import Callable - -from typing import Callable, TYPE_CHECKING +from byllm.llm import Model +from byllm.mtir import MTIR from jaclang.runtimelib.machine import hookimpl -if TYPE_CHECKING: - from byllm.llm import Model - from byllm.mtir import MTIR - class JacMachine: """Jac's with_llm feature.""" @@ -19,8 +15,6 @@ class JacMachine: @hookimpl def get_mtir(caller: Callable, args: dict, call_params: dict) -> object: """Call JacLLM and return the result.""" - from byllm.mtir import MTIR - return MTIR.factory(caller, args, call_params) @staticmethod @@ -36,8 +30,6 @@ def by(model: Model) -> Callable: def _decorator(caller: Callable) -> Callable: def _wrapped_caller(*args: object, **kwargs: object) -> object: - from byllm.mtir import MTIR - invoke_args: dict[int | str, object] = {} for i, arg in enumerate(args): invoke_args[i] = arg diff --git a/jac-byllm/examples/agentic_ai/aider-genius-lite/genius_lite.jac b/jac-byllm/examples/agentic_ai/aider-genius-lite/genius_lite.jac index ec03143dce..7ccd8ef527 100644 --- a/jac-byllm/examples/agentic_ai/aider-genius-lite/genius_lite.jac +++ b/jac-byllm/examples/agentic_ai/aider-genius-lite/genius_lite.jac @@ -1,5 +1,5 @@ -import from byllm.lib { Model } +import from byllm { Model } import from pathlib { Path } glob llm = Model(model_name="gpt-4o-mini"); diff --git a/jac-byllm/examples/core_examples/level_genarator.jac b/jac-byllm/examples/core_examples/level_genarator.jac index f4dd292ab4..d0be44bd97 100644 --- a/jac-byllm/examples/core_examples/level_genarator.jac +++ b/jac-byllm/examples/core_examples/level_genarator.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o"); diff --git a/jac-byllm/examples/core_examples/personality_finder.jac b/jac-byllm/examples/core_examples/personality_finder.jac index c081dffdb1..3fc91eee1f 100644 --- a/jac-byllm/examples/core_examples/personality_finder.jac +++ b/jac-byllm/examples/core_examples/personality_finder.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(verbose=True, model_name="gpt-4o-mini"); diff --git a/jac-byllm/examples/microbenchmarks/essay_review.jac 
b/jac-byllm/examples/microbenchmarks/essay_review.jac index b88cc73b74..4e3cfffcff 100644 --- a/jac-byllm/examples/microbenchmarks/essay_review.jac +++ b/jac-byllm/examples/microbenchmarks/essay_review.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o"); diff --git a/jac-byllm/examples/microbenchmarks/expert_answer.jac b/jac-byllm/examples/microbenchmarks/expert_answer.jac index 72ead16e10..3ab76b108c 100644 --- a/jac-byllm/examples/microbenchmarks/expert_answer.jac +++ b/jac-byllm/examples/microbenchmarks/expert_answer.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(); diff --git a/jac-byllm/examples/microbenchmarks/grammar_checker.jac b/jac-byllm/examples/microbenchmarks/grammar_checker.jac index 39efc9bad0..8966df91ee 100644 --- a/jac-byllm/examples/microbenchmarks/grammar_checker.jac +++ b/jac-byllm/examples/microbenchmarks/grammar_checker.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(); diff --git a/jac-byllm/examples/microbenchmarks/joke_gen.jac b/jac-byllm/examples/microbenchmarks/joke_gen.jac index 88df053429..69494a7623 100644 --- a/jac-byllm/examples/microbenchmarks/joke_gen.jac +++ b/jac-byllm/examples/microbenchmarks/joke_gen.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model( # model_name="gpt-4o", diff --git a/jac-byllm/examples/microbenchmarks/odd_word_out.jac b/jac-byllm/examples/microbenchmarks/odd_word_out.jac index cf2cdcbc3c..d19bc823b4 100644 --- a/jac-byllm/examples/microbenchmarks/odd_word_out.jac +++ b/jac-byllm/examples/microbenchmarks/odd_word_out.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o"); diff --git a/jac-byllm/examples/microbenchmarks/text_to_type.jac b/jac-byllm/examples/microbenchmarks/text_to_type.jac index 6ecbd8ac46..f835d1a3b0 100644 --- a/jac-byllm/examples/microbenchmarks/text_to_type.jac +++ b/jac-byllm/examples/microbenchmarks/text_to_type.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } obj Employer { diff --git a/jac-byllm/examples/microbenchmarks/translator.jac b/jac-byllm/examples/microbenchmarks/translator.jac index 015a6c795c..977b737828 100644 --- a/jac-byllm/examples/microbenchmarks/translator.jac +++ b/jac-byllm/examples/microbenchmarks/translator.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o-mini", verbose=True); diff --git a/jac-byllm/examples/tool_calling/debate_agent.jac b/jac-byllm/examples/tool_calling/debate_agent.jac index 0293f13a58..6d2b0026af 100644 --- a/jac-byllm/examples/tool_calling/debate_agent.jac +++ b/jac-byllm/examples/tool_calling/debate_agent.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } import wikipedia; glob llm = Model(model_name="gpt-4o-mini"); diff --git a/jac-byllm/examples/tool_calling/fantasy_trading_game.jac b/jac-byllm/examples/tool_calling/fantasy_trading_game.jac index e052ea4a19..b855bc65df 100644 --- a/jac-byllm/examples/tool_calling/fantasy_trading_game.jac +++ b/jac-byllm/examples/tool_calling/fantasy_trading_game.jac @@ -3,7 +3,7 @@ Fantasy Trading Game - An interactive RPG trading simulation using byLLM Demonstrates byLLM character generation, conversation, and transaction systems """ -import from byllm.lib { Model } +import from byllm { Model } import from os { 
get_terminal_size } diff --git a/jac-byllm/examples/tool_calling/marketing_agency.jac b/jac-byllm/examples/tool_calling/marketing_agency.jac index d49b260ca2..137c7b705d 100644 --- a/jac-byllm/examples/tool_calling/marketing_agency.jac +++ b/jac-byllm/examples/tool_calling/marketing_agency.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } # Note that these tools are not available in byllm package and # Should be defined by the user him/herself. diff --git a/jac-byllm/examples/tool_calling/wikipedia_react.jac b/jac-byllm/examples/tool_calling/wikipedia_react.jac index 095b4387e1..2da6ebc26e 100644 --- a/jac-byllm/examples/tool_calling/wikipedia_react.jac +++ b/jac-byllm/examples/tool_calling/wikipedia_react.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } import wikipedia; glob llm = Model(verbose=True, model_name="gpt-4o-mini"); diff --git a/jac-byllm/examples/vision/math_solver.jac b/jac-byllm/examples/vision/math_solver.jac index fd53234a9b..7424d69bec 100644 --- a/jac-byllm/examples/vision/math_solver.jac +++ b/jac-byllm/examples/vision/math_solver.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob llm = Model(verbose=True, model_name="gpt-4o"); diff --git a/jac-byllm/examples/vision/mugen.jac b/jac-byllm/examples/vision/mugen.jac index b5d2bc960e..df95b41a77 100644 --- a/jac-byllm/examples/vision/mugen.jac +++ b/jac-byllm/examples/vision/mugen.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, Video } +import from byllm { Model, Video } glob llm = Model(model_name="gpt-4o"); diff --git a/jac-byllm/examples/vision/personality_finder.jac b/jac-byllm/examples/vision/personality_finder.jac index 4dbaa73242..a802918186 100644 --- a/jac-byllm/examples/vision/personality_finder.jac +++ b/jac-byllm/examples/vision/personality_finder.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob llm = Model(model_name="gpt-4o"); diff --git a/jac-byllm/examples/vision/receipt_analyzer.jac b/jac-byllm/examples/vision/receipt_analyzer.jac index e66a520910..26de10697e 100644 --- a/jac-byllm/examples/vision/receipt_analyzer.jac +++ b/jac-byllm/examples/vision/receipt_analyzer.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, Image } +import from byllm { Model, Image } glob llm = Model(model_name="gpt-4o"); diff --git a/jac-byllm/tests/fixtures/by_expr.jac b/jac-byllm/tests/fixtures/by_expr.jac index 1d710996cb..04b32929ef 100644 --- a/jac-byllm/tests/fixtures/by_expr.jac +++ b/jac-byllm/tests/fixtures/by_expr.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, MockToolCall } +import from byllm { Model, MockToolCall } """Run Python code and return the output, if any error exists, return the error message.""" diff --git a/jac-byllm/tests/fixtures/enum_no_value.jac b/jac-byllm/tests/fixtures/enum_no_value.jac index 3b0a9a6311..b71cfe7183 100644 --- a/jac-byllm/tests/fixtures/enum_no_value.jac +++ b/jac-byllm/tests/fixtures/enum_no_value.jac @@ -1,4 +1,4 @@ -import from byllm.lib {Model} +import from byllm {Model} enum Tell { YES, NO diff --git a/jac-byllm/tests/fixtures/image_test.jac b/jac-byllm/tests/fixtures/image_test.jac index 8f96a50d0d..904c3a28d5 100644 --- a/jac-byllm/tests/fixtures/image_test.jac +++ b/jac-byllm/tests/fixtures/image_test.jac @@ -1,5 +1,5 @@ -import from byllm.lib { Model, Image } +import from byllm { Model, Image } import os; glob llm = Model( diff --git a/jac-byllm/tests/fixtures/llm_mail_summerize.jac 
b/jac-byllm/tests/fixtures/llm_mail_summerize.jac index d97f3102c2..3d14b5eb5e 100644 --- a/jac-byllm/tests/fixtures/llm_mail_summerize.jac +++ b/jac-byllm/tests/fixtures/llm_mail_summerize.jac @@ -1,5 +1,5 @@ -import from byllm.lib { Model } +import from byllm { Model } import from os { path } diff --git a/jac-byllm/tests/fixtures/llm_semstrings.jac b/jac-byllm/tests/fixtures/llm_semstrings.jac index e39486af4c..46b736f1ec 100644 --- a/jac-byllm/tests/fixtures/llm_semstrings.jac +++ b/jac-byllm/tests/fixtures/llm_semstrings.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model( model_name="mockllm", outputs=["120597", "R8@jL3pQ"], diff --git a/jac-byllm/tests/fixtures/method_incl_ctx.jac b/jac-byllm/tests/fixtures/method_incl_ctx.jac index 4ea83c190b..2e408fe6ea 100644 --- a/jac-byllm/tests/fixtures/method_incl_ctx.jac +++ b/jac-byllm/tests/fixtures/method_incl_ctx.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model( # model_name="gpt-4o-mini", diff --git a/jac-byllm/tests/fixtures/method_tool.jac b/jac-byllm/tests/fixtures/method_tool.jac index 1aa171136c..1b7d31bbb4 100644 --- a/jac-byllm/tests/fixtures/method_tool.jac +++ b/jac-byllm/tests/fixtures/method_tool.jac @@ -1,5 +1,5 @@ -import from byllm.lib { Model, MockToolCall } +import from byllm { Model, MockToolCall } node Calculator { def add(num1: int, num2: int) -> int { diff --git a/jac-byllm/tests/fixtures/python_lib_mode.py b/jac-byllm/tests/fixtures/python_lib_mode.py index f321cfa9fc..cb1b55278d 100644 --- a/jac-byllm/tests/fixtures/python_lib_mode.py +++ b/jac-byllm/tests/fixtures/python_lib_mode.py @@ -1,8 +1,8 @@ + from os import path from dataclasses import dataclass -from byllm.lib import by, Model, Image - +from byllm import by, Model, Image @dataclass class Person: @@ -10,22 +10,21 @@ class Person: birth_year: int description: str - llm = Model( # model_name="gpt-4o", model_name="mockllm", outputs=[ Person( - name="Alan Turing", + name='Alan Turing', birth_year=1912, description=( - "A pioneering mathematician and computer scientist, known for " - "his work in developing the concept of a Turing machine and " - "for his crucial role in breaking the Enigma code during World " - "War II." - ), + 'A pioneering mathematician and computer scientist, known for ' + 'his work in developing the concept of a Turing machine and ' + 'for his crucial role in breaking the Enigma code during World ' + 'War II.' 
+ ) ) - ], + ] ) diff --git a/jac-byllm/tests/fixtures/streaming_output.jac b/jac-byllm/tests/fixtures/streaming_output.jac index cf05207b2d..c45fa55fd9 100644 --- a/jac-byllm/tests/fixtures/streaming_output.jac +++ b/jac-byllm/tests/fixtures/streaming_output.jac @@ -1,5 +1,5 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model( model_name="mockllm", diff --git a/jac-byllm/tests/fixtures/webp_support_test.jac b/jac-byllm/tests/fixtures/webp_support_test.jac index ed046bacce..69b18e56b5 100644 --- a/jac-byllm/tests/fixtures/webp_support_test.jac +++ b/jac-byllm/tests/fixtures/webp_support_test.jac @@ -1,4 +1,4 @@ -import from byllm.lib {Model, Image} +import from byllm {Model, Image} import os; 'Personality of the Person' diff --git a/jac-byllm/tests/fixtures/with_llm_function.jac b/jac-byllm/tests/fixtures/with_llm_function.jac index 26092c4b60..243ef22dff 100644 --- a/jac-byllm/tests/fixtures/with_llm_function.jac +++ b/jac-byllm/tests/fixtures/with_llm_function.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model( model_name="mockllm", diff --git a/jac-byllm/tests/fixtures/with_llm_image.jac b/jac-byllm/tests/fixtures/with_llm_image.jac index b2e6f48522..d740972e3e 100644 --- a/jac-byllm/tests/fixtures/with_llm_image.jac +++ b/jac-byllm/tests/fixtures/with_llm_image.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, Image } +import from byllm { Model, Image } import os; glob llm = Model( diff --git a/jac-byllm/tests/fixtures/with_llm_lower.jac b/jac-byllm/tests/fixtures/with_llm_lower.jac index 7bb5205783..f8a6a33779 100644 --- a/jac-byllm/tests/fixtures/with_llm_lower.jac +++ b/jac-byllm/tests/fixtures/with_llm_lower.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } enum Personality { INTROVERT = "Introvert", diff --git a/jac-byllm/tests/fixtures/with_llm_method.jac b/jac-byllm/tests/fixtures/with_llm_method.jac index e17604bb14..363a630194 100644 --- a/jac-byllm/tests/fixtures/with_llm_method.jac +++ b/jac-byllm/tests/fixtures/with_llm_method.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } class PersonalityIndex { has index: int; diff --git a/jac-byllm/tests/fixtures/with_llm_type.jac b/jac-byllm/tests/fixtures/with_llm_type.jac index 373c437212..133be559b6 100644 --- a/jac-byllm/tests/fixtures/with_llm_type.jac +++ b/jac-byllm/tests/fixtures/with_llm_type.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } obj Person { diff --git a/jac-byllm/tests/fixtures/with_llm_video.jac b/jac-byllm/tests/fixtures/with_llm_video.jac index bee8df50dc..1addf29ee3 100644 --- a/jac-byllm/tests/fixtures/with_llm_video.jac +++ b/jac-byllm/tests/fixtures/with_llm_video.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model, Video } +import from byllm { Model, Video } import os; diff --git a/jac-byllm/tests/test_schema.jac b/jac-byllm/tests/test_schema.jac index d9eca9aeef..513bdd04e4 100644 --- a/jac-byllm/tests/test_schema.jac +++ b/jac-byllm/tests/test_schema.jac @@ -13,7 +13,7 @@ To run a specific test: """ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model( model_name="gpt-4o-mini", diff --git a/jac/examples/reference/semstrings.jac b/jac/examples/reference/semstrings.jac index 7fdd6e732b..34f1f82a0d 100644 --- a/jac/examples/reference/semstrings.jac +++ b/jac/examples/reference/semstrings.jac @@ -1,6 +1,6 @@ """Semstrings: Semantic string definitions for LLM-guided functions.""" -import from byllm.lib { 
Model } +import from byllm { Model } let llm = Model(model_name="mockllm", outputs=["SecureP@ss1"]); diff --git a/jac/examples/reference/semstrings.py b/jac/examples/reference/semstrings.py index b949501f50..d86b957f16 100644 --- a/jac/examples/reference/semstrings.py +++ b/jac/examples/reference/semstrings.py @@ -1,7 +1,7 @@ """Semstrings: Semantic string definitions for LLM-guided functions.""" from __future__ import annotations from jaclang.lib import call_llm, get_mtir, sem -from byllm.lib import Model +from byllm import Model llm = Model(model_name='mockllm', outputs=['SecureP@ss1']) @sem('\nPassword is at least 8 characters, has one uppercase letter,\none lowercase letter, one digit, and one special character.\n', {}) diff --git a/jac/examples/rpg_game/jac_impl/jac_impl_6/utils/level_manager.jac b/jac/examples/rpg_game/jac_impl/jac_impl_6/utils/level_manager.jac index e9c78fc151..7394ddf6a6 100644 --- a/jac/examples/rpg_game/jac_impl/jac_impl_6/utils/level_manager.jac +++ b/jac/examples/rpg_game/jac_impl/jac_impl_6/utils/level_manager.jac @@ -1,4 +1,4 @@ -import from byllm.lib { Model } +import from byllm { Model } glob llm = Model(model_name="gpt-4o", verbose=True); diff --git a/jac/examples/rpg_game/lib_mode/map.py b/jac/examples/rpg_game/lib_mode/map.py index 2704ef00e3..a183cf958f 100644 --- a/jac/examples/rpg_game/lib_mode/map.py +++ b/jac/examples/rpg_game/lib_mode/map.py @@ -1,4 +1,4 @@ -from byllm.lib import Model, by +from byllm import Model, by from dataclasses import dataclass @dataclass From 164d58ba5195274657777c1be3ccd5a153506ad6 Mon Sep 17 00:00:00 2001 From: Thakee Nathees Date: Fri, 10 Oct 2025 12:45:49 +0800 Subject: [PATCH 2/2] LiteLLM Lazy loading implemented --- jac-byllm/byllm/llm.py | 3 ++- jac-byllm/byllm/llm_connector.py | 2 +- jac-byllm/byllm/mtir.py | 3 +-- jac-byllm/byllm/types.py | 15 +++++++++------ 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/jac-byllm/byllm/llm.py b/jac-byllm/byllm/llm.py index 32b91937f8..a3cadc9c05 100644 --- a/jac-byllm/byllm/llm.py +++ b/jac-byllm/byllm/llm.py @@ -17,7 +17,6 @@ # https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True" -from .llm_connector import LLMConnector from .types import CompletionResult SYSTEM_PERSONA = """\ @@ -48,6 +47,8 @@ def __init__(self, model_name: str, **kwargs: object) -> None: api_key: API key for the model provider **kwargs: Additional configuration options """ + from .llm_connector import LLMConnector + self.llm_connector = LLMConnector.for_model(model_name, **kwargs) def __call__(self, **kwargs: object) -> "Model": diff --git a/jac-byllm/byllm/llm_connector.py b/jac-byllm/byllm/llm_connector.py index c5b3c5e9e7..9988756d36 100644 --- a/jac-byllm/byllm/llm_connector.py +++ b/jac-byllm/byllm/llm_connector.py @@ -22,6 +22,7 @@ import litellm from litellm._logging import _disable_debugging +from litellm.types.utils import Message as LiteLLMMessage from openai import OpenAI @@ -29,7 +30,6 @@ from .types import ( CompletionResult, - LiteLLMMessage, MockToolCall, ToolCall, ) diff --git a/jac-byllm/byllm/mtir.py b/jac-byllm/byllm/mtir.py index 3880fd965b..102dad1ce4 100644 --- a/jac-byllm/byllm/mtir.py +++ b/jac-byllm/byllm/mtir.py @@ -8,7 +8,6 @@ from byllm.schema import json_to_instance, type_to_schema from byllm.types import ( - LiteLLMMessage, Media, Message, MessageRole, @@ -150,7 +149,7 @@ def add_message(self, message: MessageType) -> None: """Add a message to the request.""" 
self.messages.append(message) - def get_msg_list(self) -> list[dict[str, object] | LiteLLMMessage]: + def get_msg_list(self) -> "list[dict[str, object] | MessageType]": """Return the messages in a format suitable for LLM API.""" return [ msg.to_dict() if isinstance(msg, Message) else msg for msg in self.messages diff --git a/jac-byllm/byllm/types.py b/jac-byllm/byllm/types.py index 40dc9227bf..957bf631b7 100644 --- a/jac-byllm/byllm/types.py +++ b/jac-byllm/byllm/types.py @@ -12,19 +12,22 @@ from dataclasses import dataclass from enum import StrEnum from io import BytesIO -from typing import Callable, TypeAlias, get_type_hints +from typing import Callable, TYPE_CHECKING, TypeAlias, get_type_hints from PIL.Image import open as open_image -from litellm.types.utils import Message as LiteLLMMessage - from pydantic import TypeAdapter from .schema import tool_to_schema -# The message can be a jaclang defined message or what ever the llm -# returned object that was feed back to the llm as it was given (dict). -MessageType: TypeAlias = "Message | LiteLLMMessage" +if TYPE_CHECKING: + from litellm.types.utils import Message as LiteLLMMessage + + # The message can be a jaclang defined message or what ever the llm + # returned object that was feed back to the llm as it was given (dict). + MessageType: TypeAlias = "Message | LiteLLMMessage" +else: + MessageType: TypeAlias = "Message" class MessageRole(StrEnum):
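
The second commit's core technique is deferring expensive imports: `litellm` is pulled in only inside function bodies at call time, and only inside `TYPE_CHECKING` blocks for annotations, so `import byllm` itself stays cheap. Below is a minimal, illustrative sketch of that pattern, not part of the patch; `heavy_dep` and `Client` are hypothetical stand-ins for `litellm` and its types.

```python
# Sketch of the deferred-import pattern applied in llm.py and types.py above.
# "heavy_dep" / "Client" are hypothetical placeholders for an expensive
# dependency such as litellm; nothing here is byllm API.
from __future__ import annotations

from typing import TYPE_CHECKING, TypeAlias

if TYPE_CHECKING:
    # Seen only by static type checkers; costs nothing at runtime.
    from heavy_dep import Client

    ResultType: TypeAlias = "Client | None"
else:
    # Narrower runtime alias, mirroring the MessageType fallback above.
    ResultType: TypeAlias = "object | None"


def make_client(api_key: str) -> "Client":
    """Create a client, importing the heavy dependency on first use."""
    # The import runs at call time, so importing this module stays fast
    # even when heavy_dep is slow to load.
    from heavy_dep import Client

    return Client(api_key=api_key)
```

The trade-off is the usual one for lazy loading: module import becomes fast and optional dependencies stay optional, while import errors surface later, at the first call that actually needs the dependency.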