Skip to content

Commit de5a1b3

Browse files
committed
add batch cli process status endpoint
Parent: db4dafa · Commit: de5a1b3

File tree

3 files changed

+60
-3
lines changed

3 files changed

+60
-3
lines changed

app/lib/endpoints/llama_cli_endpoints.py

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,8 @@
1010
LlamaCliChatRequest,
1111
BatchLlamaCliInitRequest,
1212
BatchLlamaCliRemoveRequest,
13-
BatchLlamaCliChatRequest
13+
BatchLlamaCliChatRequest,
14+
BatchLlamaCliStatusRequest
1415
)
1516

1617
# Import the process management functions for persistent sessions
@@ -200,3 +201,21 @@ async def process_request(req: LlamaCliChatRequest):
200201

201202
results = await asyncio.gather(*(process_request(req) for req in batch_request.requests))
202203
return results
204+
205+
async def handle_get_batch_llama_cli_status(batch_request: BatchLlamaCliStatusRequest) -> List[Dict[str, Any]]:
    """
    Fan out status lookups for several persistent llama-cli sessions.

    Each alias in the batch is resolved independently: a failure for one
    alias is reported inside that alias's result entry and never aborts
    the remaining lookups. Results are returned in request order.
    """
    async def status_for(cli_alias: str) -> Dict[str, Any]:
        try:
            # Delegate to the existing single-session status handler.
            session_info = await get_llama_cli_session_status(cli_alias)
        except HTTPException as http_err:
            # Expected error (e.g. unknown alias): surface its detail and code.
            return {
                "cli_alias": cli_alias,
                "status": "error",
                "detail": http_err.detail,
                "status_code": http_err.status_code,
            }
        except Exception as err:
            logger.error(f"Unexpected error processing batch status for alias {cli_alias}: {str(err)}", exc_info=True)
            return {
                "cli_alias": cli_alias,
                "status": "error",
                "detail": "An unexpected server error occurred.",
                "status_code": 500,
            }
        return {"cli_alias": cli_alias, "status": "success", "data": session_info}

    # Run all lookups concurrently; gather preserves input order.
    return await asyncio.gather(*(status_for(a) for a in batch_request.aliases))

app/lib/models.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,5 +154,14 @@ def validate_aliases(cls, v):
154154
raise ValueError('An alias in the list contains invalid characters. Use only alphanumeric, hyphens, and underscores.')
155155
return v
156156

157+
class BatchLlamaCliStatusRequest(BaseModel):
    """Request body for checking the status of several llama-cli sessions at once."""

    aliases: list[str] = Field(..., description="A list of llama-cli configuration aliases to check the status of.")

    @validator('aliases', each_item=True)
    def validate_aliases(cls, v):
        # Guard clause: accept only aliases that pass the shared safety check,
        # mirroring the validation used by the other batch request models.
        if is_safe_cli_alias(v):
            return v
        raise ValueError('An alias in the list contains invalid characters. Use only alphanumeric, hyphens, and underscores.')
165+
157166
class BatchLlamaCliChatRequest(BaseModel):
158167
requests: list[LlamaCliChatRequest] = Field(..., description="A list of chat requests to process.")

app/main.py

Lines changed: 31 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,8 @@
1212
LlamaCliChatRequest,
1313
BatchLlamaCliInitRequest,
1414
BatchLlamaCliRemoveRequest,
15-
BatchLlamaCliChatRequest
15+
BatchLlamaCliChatRequest,
16+
BatchLlamaCliStatusRequest
1617
)
1718

1819
from lib.endpoints.chat_endpoints import ChatRequest, MultiChatRequest
@@ -46,7 +47,8 @@
4647
chat_with_llama_cli_session,
4748
handle_initialize_batch_llama_cli_configs,
4849
handle_remove_batch_llama_cli_configs,
49-
handle_batch_chat_with_llama_cli
50+
handle_batch_chat_with_llama_cli,
51+
handle_get_batch_llama_cli_status
5052
)
5153

5254
# --- Logging Configuration ---
@@ -628,6 +630,33 @@ async def get_llama_cli_status_endpoint(cli_alias: str = Path(..., description="
628630
logger.error(f"Unexpected error in /llama-cli-status for alias {cli_alias}: {str(e)}", exc_info=True)
629631
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"An unexpected error occurred: {str(e)}")
630632

633+
@app.post(
    "/batch-llama-cli-status",
    summary="Get Status of Multiple Llama CLI Sessions",
    tags=["Llama CLI Management"],
    operation_id="get_batch_llama_cli_session_status"
)
async def batch_llama_cli_status(request: BatchLlamaCliStatusRequest):
    """
    Retrieves the status of multiple `llama-cli` sessions in a single batch request.

    **Request Body**:
    - `BatchLlamaCliStatusRequest`: A JSON object containing a list of `aliases` (strings)
      of the `llama-cli` sessions to check.

    **Successful Response (200 OK)**:
    - A JSON list where each item corresponds to an alias in the request, detailing its
      status (running/stopped), PID (if running), and configuration.

    **Error Responses**:
    - `500 Internal Server Error`: If an unexpected error occurs during the batch processing logic.
    """
    try:
        return await handle_get_batch_llama_cli_status(request)
    except HTTPException:
        # Re-raise deliberate HTTP errors unchanged instead of masking them
        # as generic 500s via the broad handler below.
        raise
    except Exception as e:
        logger.error(f"Error in /batch-llama-cli-status endpoint: {str(e)}", exc_info=True)
        # Use the status constant rather than a bare 500 literal, for
        # consistency with the single-session /llama-cli-status endpoint.
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"An unexpected error occurred while fetching batch CLI status: {str(e)}",
        )
659+
631660
@app.post(
632661
"/chat-llama-cli",
633662
summary="Chat with a Persistent Llama CLI Session",

0 commit comments

Comments
 (0)