---
# LLM Proxy Configuration
# This file configures the LLM proxy server and its integrations
# Server configuration
server:
  port: 3001
  # Timeout values are duration strings (e.g. "10m" = 10 minutes)
  timeouts:
    read: 10m   # Read timeout (default: 10 minutes)
    write: 10m  # Write timeout (default: 10 minutes)
    idle: 10m   # Idle timeout (default: 10 minutes)
# Provider configurations
providers:
  # Anthropic Claude configuration
  anthropic:
    base_url: "https://api.anthropic.com"
    # API version date — quoted so it stays a string, not a YAML date
    version: "2023-06-01"
    max_retries: 3

  # OpenAI configuration
  # Explicit empty mapping: a bare `openai:` key would parse as null,
  # which can break consumers that expect a map here.
  openai: {}
    # API key can be set here or via OPENAI_API_KEY environment variable
    # api_key: "your-api-key-here"
    # base_url: ""
# Storage configuration
storage:
  # SQLite database path for storing request history
  db_path: "requests.db"
# Subagent Configuration (Optional)
# Enable this feature if you want to route specific Claude Code agents to different LLM providers
# For subagent setup instructions, see: https://docs.anthropic.com/en/docs/claude-code/sub-agents
subagents:
  # Enable subagent routing
  enable: false

  # Maps subagent types to specific models
  mappings:
    streaming-systems-engineer: "gpt-4o"
    codebase-analyzer: "gpt-4o"
    # Add more subagent mappings as needed
    # example-agent: "gpt-4o"