diff --git a/backend/config.py b/backend/config.py
index dd5caa3..89bc658 100644
--- a/backend/config.py
+++ b/backend/config.py
@@ -1,6 +1,8 @@
-from dotenv import load_dotenv, find_dotenv
+import logging
 import os
+from dotenv import load_dotenv, find_dotenv
 
+logger = logging.getLogger(__name__)
 
 dotenv_path = find_dotenv(usecwd=True)
 if dotenv_path:
@@ -12,5 +14,14 @@ GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") or os.getenv("GH_TOKEN")
 
 
 MODEL_NAME = os.getenv("EMBEDDING_MODEL", "BAAI/bge-small-en-v1.5")
-MAX_BATCH_SIZE = int(os.getenv("EMBEDDING_MAX_BATCH_SIZE", "32"))
+
+raw_batch_size = os.getenv("EMBEDDING_MAX_BATCH_SIZE", "32")
+try:
+    MAX_BATCH_SIZE = int(raw_batch_size)
+    if MAX_BATCH_SIZE <= 0:
+        raise ValueError
+except ValueError:
+    logger.warning("Invalid EMBEDDING_MAX_BATCH_SIZE '%s'. Defaulting to 32.", raw_batch_size)
+    MAX_BATCH_SIZE = 32
+
 EMBEDDING_DEVICE = os.getenv("EMBEDDING_DEVICE", "cpu")
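
A quick sanity check of the new fallback behaviour (a minimal sketch, not part of the patch; it assumes `backend` is an importable package and that reloading the config module in a test context is acceptable):

import importlib
import os

import backend.config as config  # assumed import path for the patched module


def _reload_with(value):
    # Set EMBEDDING_MAX_BATCH_SIZE, then re-run the module-level parsing.
    os.environ["EMBEDDING_MAX_BATCH_SIZE"] = value
    return importlib.reload(config)


assert _reload_with("not-a-number").MAX_BATCH_SIZE == 32  # non-numeric: warns, falls back
assert _reload_with("0").MAX_BATCH_SIZE == 32             # non-positive: warns, falls back
assert _reload_with("64").MAX_BATCH_SIZE == 64            # valid positive integer passes through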