-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
77 lines (65 loc) · 3.08 KB
/
.env.example
File metadata and controls
77 lines (65 loc) · 3.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# Docker user permissions
USER_ID=1000
GROUP_ID=1000
# USER_ID/GROUP_ID above are generated by setup-github-actions-permissions.sh
# and ensure containers run with the correct (non-root) user permissions
OPENROUTER_API_KEY=your_api_key_here
GITHUB_REPOSITORY=AndrewAltimit/rust-psp
GITHUB_PROJECTS_TOKEN=your_api_key_here
GITHUB_TOKEN=your_api_key_here
# Codex Configuration -- DISABLED
# OpenAI Codex has been removed from this project due to OpenAI's partnerships
# enabling mass surveillance and autonomous weapons decision-making. See README.md.
# These variables are retained for reference only. Do not enable.
# CODEX_BYPASS_SANDBOX=false
# Optional: ElevenLabs Configuration
ELEVENLABS_API_KEY=your_api_key_here
ELEVENLABS_DEFAULT_MODEL=eleven_v3
# Virtual Character Storage Service
# Secure file exchange service for audio, animations, and other virtual character assets
# Supports cross-machine transfer (VM to host, remote servers, etc.)
# Generate secret with: python -c "import secrets; print(secrets.token_hex(32))"
STORAGE_SECRET_KEY=your_api_key_here
# Base URL for storage service (update for remote access)
STORAGE_BASE_URL=http://192.168.0.222:8021
# GPU device selection for multi-GPU systems (default: 0)
# Set to specific GPU index (0, 1, 2, etc.) or "all" for all GPUs
GPU_DEVICE=0
# Gemini API Key (Free Tier from Google AI Studio)
# Get your free API key at: https://aistudio.google.com/app/apikey
# This is required for automated PR reviews and local Gemini CLI usage
# Note: Gemini CLI prioritizes GEMINI_API_KEY; set both for compatibility
GOOGLE_API_KEY=your_api_key_here
GEMINI_API_KEY=your_api_key_here
# Gemini Model Configuration (optional - defaults configured in .agents.yaml)
# Available models: gemini-3-pro-preview, gemini-3-flash-preview
GEMINI_PRIMARY_MODEL=gemini-3-pro-preview
GEMINI_FALLBACK_MODEL=gemini-3-flash-preview
# =============================================================================
# AgentCore Memory Configuration
# =============================================================================
# Multi-provider memory system for AI agents.
# - agentcore: AWS Bedrock AgentCore (managed, rate-limited)
# - chromadb: Self-hosted ChromaDB (no limits, free)
# Provider selection: agentcore | chromadb
MEMORY_PROVIDER=agentcore
# --- AWS AgentCore Provider ---
# Required when MEMORY_PROVIDER=agentcore
# Memory ID is created via: python -m mcp_agentcore_memory.scripts.setup_memory
AWS_REGION=us-east-1
AGENTCORE_MEMORY_ID=mem-xxxxxxxxxxxx
# Optional: VPC PrivateLink endpoint (data plane only, leave empty for public endpoint)
AGENTCORE_DATA_PLANE_ENDPOINT=
# --- ChromaDB Provider (Self-Hosted) ---
# Required when MEMORY_PROVIDER=chromadb
# Start ChromaDB with: docker compose --profile memory-chromadb up -d
#
# IMPORTANT: Network configuration
# - For container-to-container (MCP server in Docker): use "chromadb" (Docker service name)
# - For host-to-container (local development/testing): use "localhost"
# The default uses the Docker service name for container-first architecture.
CHROMADB_HOST=chromadb
CHROMADB_PORT=8000
CHROMADB_COLLECTION=agent_memory
# Read-only Hugging Face token used for fetching models
HUGGINGFACE_TOKEN=