This commit is contained in:
Seif Ghazi 2025-08-01 21:19:30 -04:00
parent 9cb513019d
commit 80a25f7ba7
No known key found for this signature in database
GPG key ID: 4519A4B1EEC1494E
17 changed files with 1566 additions and 28 deletions

View file

@ -1,4 +1,4 @@
.PHONY: all build run clean install dev .PHONY: all build run clean install dev test test-proxy test-coverage
# Default target # Default target
all: install build all: install build
@ -43,6 +43,19 @@ clean:
rm -f requests.db rm -f requests.db
rm -rf requests/ rm -rf requests/
# Testing
test: test-proxy
test-proxy:
@echo "🧪 Running proxy tests..."
cd proxy && go test -v ./...
test-coverage:
@echo "📊 Running tests with coverage..."
cd proxy && go test -v -coverprofile=coverage.out ./...
cd proxy && go tool cover -html=coverage.out -o coverage.html
@echo "📊 Coverage report generated: proxy/coverage.html"
# Database operations # Database operations
db-reset: db-reset:
@echo "🗑️ Resetting database..." @echo "🗑️ Resetting database..."

37
config.yaml Normal file
View file

@ -0,0 +1,37 @@
# LLM Proxy Configuration
# This file configures the LLM proxy server and its integrations
# Server configuration
server:
port: 3001
timeouts:
read: 10m # Read timeout (default: 10 minutes)
write: 10m # Write timeout (default: 10 minutes)
idle: 10m # Idle timeout (default: 10 minutes)
# Provider configurations
providers:
# Anthropic Claude configuration
anthropic:
base_url: "https://api.anthropic.com"
version: "2023-06-01"
max_retries: 3
# OpenAI configuration
openai:
# API key can be set here or via OPENAI_API_KEY environment variable
# api_key: "your-api-key-here"
base_url: "https://proxy-shopify-ai.local.shop.dev"
# Storage configuration
storage:
# SQLite database path for storing request history
db_path: "requests.db"
# Subagent mappings
# Maps subagent types to specific models
subagents:
mappings:
streaming-systems-engineer: "gpt-4o"
# Add more subagent mappings as needed
# example-agent: "gpt-4o"

87
config.yaml.example Normal file
View file

@ -0,0 +1,87 @@
# LLM Proxy Configuration Example
# This file demonstrates all available configuration options
# Copy this file to config.yaml and customize as needed
# Server configuration
server:
# Port to listen on (default: 3001)
port: 3001
# Timeout configurations
timeouts:
# Maximum duration for reading the entire request, including the body
read: 10m
# Maximum duration before timing out writes of the response
write: 10m
# Maximum amount of time to wait for the next request when keep-alives are enabled
idle: 10m
# Provider configurations
providers:
# Anthropic Claude configuration
anthropic:
# Base URL for Anthropic API (can be changed for custom endpoints)
base_url: "https://api.anthropic.com"
# API version to use
version: "2023-06-01"
# Maximum number of retries for failed requests
max_retries: 3
# OpenAI configuration
openai:
# API key for OpenAI
# Can also be set via OPENAI_API_KEY environment variable
# api_key: "sk-..."
# Base URL for OpenAI API (can be changed for custom endpoints)
# Can also be set via OPENAI_BASE_URL environment variable
# base_url: "https://api.openai.com"
# Storage configuration
storage:
# SQLite database path for storing request history
db_path: "requests.db"
# Directory for storing request files (if needed in future)
# requests_dir: "./requests"
# Subagent mappings
# Maps subagent types to specific models
subagents:
mappings:
# Code review specialist (example)
# code-reviewer: "gpt-4o"
# Data analysis expert (example)
# data-analyst: "claude-3-5-sonnet-20241022"
# Documentation writer (example)
# doc-writer: "gpt-3.5-turbo"
# Environment variable overrides:
# The following environment variables will override the YAML configuration:
#
# Server:
# PORT - Server port
# READ_TIMEOUT - Read timeout duration
# WRITE_TIMEOUT - Write timeout duration
# IDLE_TIMEOUT - Idle timeout duration
#
# Anthropic:
# ANTHROPIC_FORWARD_URL - Anthropic base URL
# ANTHROPIC_VERSION - Anthropic API version
# ANTHROPIC_MAX_RETRIES - Maximum retries for Anthropic requests
#
# OpenAI:
# OPENAI_API_KEY - OpenAI API key
#
# Storage:
# DB_PATH - Database file path
#
# Subagents:
# SUBAGENT_MAPPINGS - Comma-separated subagent:model pairs
# Example: "code-reviewer:claude-3-5-sonnet"

View file

@ -15,6 +15,7 @@ import (
"github.com/seifghazi/claude-code-monitor/internal/config" "github.com/seifghazi/claude-code-monitor/internal/config"
"github.com/seifghazi/claude-code-monitor/internal/handler" "github.com/seifghazi/claude-code-monitor/internal/handler"
"github.com/seifghazi/claude-code-monitor/internal/middleware" "github.com/seifghazi/claude-code-monitor/internal/middleware"
"github.com/seifghazi/claude-code-monitor/internal/provider"
"github.com/seifghazi/claude-code-monitor/internal/service" "github.com/seifghazi/claude-code-monitor/internal/service"
) )
@ -26,6 +27,15 @@ func main() {
logger.Fatalf("❌ Failed to load configuration: %v", err) logger.Fatalf("❌ Failed to load configuration: %v", err)
} }
// Initialize providers
providers := make(map[string]provider.Provider)
providers["anthropic"] = provider.NewAnthropicProvider(&cfg.Providers.Anthropic)
providers["openai"] = provider.NewOpenAIProvider(&cfg.Providers.OpenAI)
// Initialize model router
modelRouter := service.NewModelRouter(cfg, providers, logger)
// Use legacy anthropic service for backward compatibility
anthropicService := service.NewAnthropicService(&cfg.Anthropic) anthropicService := service.NewAnthropicService(&cfg.Anthropic)
// Use SQLite storage // Use SQLite storage
@ -35,7 +45,7 @@ func main() {
} }
logger.Println("🗿 SQLite database ready") logger.Println("🗿 SQLite database ready")
h := handler.New(anthropicService, storageService, logger) h := handler.New(anthropicService, storageService, logger, modelRouter)
r := mux.NewRouter() r := mux.NewRouter()

View file

@ -7,6 +7,7 @@ require (
github.com/gorilla/mux v1.8.1 github.com/gorilla/mux v1.8.1
github.com/joho/godotenv v1.5.1 github.com/joho/godotenv v1.5.1
github.com/mattn/go-sqlite3 v1.14.28 github.com/mattn/go-sqlite3 v1.14.28
gopkg.in/yaml.v3 v3.0.1
) )
require github.com/felixge/httpsnoop v1.0.3 // indirect require github.com/felixge/httpsnoop v1.0.3 // indirect

View file

@ -8,3 +8,7 @@ github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A= github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A=
github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View file

@ -1,27 +1,56 @@
package config package config
import ( import (
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"strconv" "strconv"
"time" "time"
"github.com/joho/godotenv" "github.com/joho/godotenv"
"gopkg.in/yaml.v3"
) )
type Config struct { type Config struct {
Server ServerConfig Server ServerConfig `yaml:"server"`
Providers ProvidersConfig `yaml:"providers"`
Storage StorageConfig `yaml:"storage"`
Subagents SubagentsConfig `yaml:"subagents"`
// Legacy fields for backward compatibility
Anthropic AnthropicConfig Anthropic AnthropicConfig
Storage StorageConfig
} }
type ServerConfig struct { type ServerConfig struct {
Port string Port string `yaml:"port"`
Timeouts TimeoutsConfig `yaml:"timeouts"`
// Legacy fields
ReadTimeout time.Duration ReadTimeout time.Duration
WriteTimeout time.Duration WriteTimeout time.Duration
IdleTimeout time.Duration IdleTimeout time.Duration
} }
// TimeoutsConfig holds server timeout values as Go duration strings
// (e.g. "10m"); Load parses them with time.ParseDuration and falls back
// to the legacy duration fields when a value is empty or unparseable.
type TimeoutsConfig struct {
	Read  string `yaml:"read"`
	Write string `yaml:"write"`
	Idle  string `yaml:"idle"`
}

// ProvidersConfig groups the per-provider upstream settings.
type ProvidersConfig struct {
	Anthropic AnthropicProviderConfig `yaml:"anthropic"`
	OpenAI    OpenAIProviderConfig    `yaml:"openai"`
}

// AnthropicProviderConfig configures the Anthropic upstream: base URL,
// API version header value, and retry budget.
type AnthropicProviderConfig struct {
	BaseURL    string `yaml:"base_url"`
	Version    string `yaml:"version"`
	MaxRetries int    `yaml:"max_retries"`
}

// OpenAIProviderConfig configures the OpenAI upstream. APIKey may also be
// supplied via the OPENAI_API_KEY environment variable.
type OpenAIProviderConfig struct {
	BaseURL string `yaml:"base_url"`
	APIKey  string `yaml:"api_key"`
}
type AnthropicConfig struct { type AnthropicConfig struct {
BaseURL string BaseURL string
Version string Version string
@ -29,8 +58,12 @@ type AnthropicConfig struct {
} }
type StorageConfig struct { type StorageConfig struct {
RequestsDir string RequestsDir string `yaml:"requests_dir"`
DBPath string DBPath string `yaml:"db_path"`
}
// SubagentsConfig maps subagent type names to the model each should be
// routed to (e.g. "streaming-systems-engineer" -> "gpt-4o").
type SubagentsConfig struct {
	Mappings map[string]string `yaml:"mappings"`
}
func Load() (*Config, error) { func Load() (*Config, error) {
@ -45,26 +78,84 @@ func Load() (*Config, error) {
} }
} }
// Start with default configuration
cfg := &Config{ cfg := &Config{
Server: ServerConfig{ Server: ServerConfig{
Port: getEnv("PORT", "3001"), Port: getEnv("PORT", "3001"),
ReadTimeout: getDuration("READ_TIMEOUT", 600*time.Second), // Increased to 10 minutes ReadTimeout: getDuration("READ_TIMEOUT", 600*time.Second),
WriteTimeout: getDuration("WRITE_TIMEOUT", 600*time.Second), // Increased to 10 minutes WriteTimeout: getDuration("WRITE_TIMEOUT", 600*time.Second),
IdleTimeout: getDuration("IDLE_TIMEOUT", 600*time.Second), // Increased to 10 minutes IdleTimeout: getDuration("IDLE_TIMEOUT", 600*time.Second),
}, },
Providers: ProvidersConfig{
Anthropic: AnthropicProviderConfig{
BaseURL: getEnv("ANTHROPIC_FORWARD_URL", "https://api.anthropic.com"),
Version: getEnv("ANTHROPIC_VERSION", "2023-06-01"),
MaxRetries: getInt("ANTHROPIC_MAX_RETRIES", 3),
},
OpenAI: OpenAIProviderConfig{
BaseURL: getEnv("OPENAI_BASE_URL", "https://api.openai.com"),
APIKey: getEnv("OPENAI_API_KEY", ""),
},
},
Storage: StorageConfig{
DBPath: getEnv("DB_PATH", "requests.db"),
},
Subagents: SubagentsConfig{
Mappings: make(map[string]string),
},
// Legacy field for backward compatibility
Anthropic: AnthropicConfig{ Anthropic: AnthropicConfig{
BaseURL: getEnv("ANTHROPIC_FORWARD_URL", "https://api.anthropic.com"), BaseURL: getEnv("ANTHROPIC_FORWARD_URL", "https://api.anthropic.com"),
Version: getEnv("ANTHROPIC_VERSION", "2023-06-01"), Version: getEnv("ANTHROPIC_VERSION", "2023-06-01"),
MaxRetries: getInt("ANTHROPIC_MAX_RETRIES", 3), MaxRetries: getInt("ANTHROPIC_MAX_RETRIES", 3),
}, },
Storage: StorageConfig{ }
DBPath: getEnv("DB_PATH", "requests.db"),
}, // Try to load from YAML config file if specified
configPath := getEnv("CONFIG_PATH", "../config.yaml")
if configPath != "" {
if err := cfg.loadFromFile(configPath); err != nil {
// Log error but continue with defaults
fmt.Printf("Warning: Failed to load config from %s: %v\n", configPath, err)
}
}
// After loading from file, apply any timeout conversions if needed
if cfg.Server.Timeouts.Read != "" {
if duration, err := time.ParseDuration(cfg.Server.Timeouts.Read); err == nil {
cfg.Server.ReadTimeout = duration
}
}
if cfg.Server.Timeouts.Write != "" {
if duration, err := time.ParseDuration(cfg.Server.Timeouts.Write); err == nil {
cfg.Server.WriteTimeout = duration
}
}
if cfg.Server.Timeouts.Idle != "" {
if duration, err := time.ParseDuration(cfg.Server.Timeouts.Idle); err == nil {
cfg.Server.IdleTimeout = duration
}
}
// Sync legacy Anthropic config with new structure
cfg.Anthropic = AnthropicConfig{
BaseURL: cfg.Providers.Anthropic.BaseURL,
Version: cfg.Providers.Anthropic.Version,
MaxRetries: cfg.Providers.Anthropic.MaxRetries,
} }
return cfg, nil return cfg, nil
} }
// loadFromFile overlays YAML configuration read from path onto c; fields the
// file does not mention keep their current (default) values.
func (c *Config) loadFromFile(path string) error {
	raw, readErr := os.ReadFile(path)
	if readErr != nil {
		return readErr
	}
	return yaml.Unmarshal(raw, c)
}
func getEnv(key, defaultValue string) string { func getEnv(key, defaultValue string) string {
if value := os.Getenv(key); value != "" { if value := os.Getenv(key); value != "" {
return value return value

View file

@ -0,0 +1,202 @@
package config
import (
"os"
"path/filepath"
"testing"
"time"
)
// TestLoad exercises config loading: YAML file values, built-in defaults,
// environment-variable overrides, and resilience to malformed YAML.
func TestLoad(t *testing.T) {
	// Snapshot the environment variables this test mutates and restore them
	// afterwards; values that were originally unset are unset again rather
	// than being restored as empty strings.
	envKeys := []string{"CONFIG_PATH", "PORT", "ANTHROPIC_FORWARD_URL", "OPENAI_API_KEY"}
	saved := make(map[string]string, len(envKeys))
	for _, key := range envKeys {
		saved[key] = os.Getenv(key)
	}
	defer func() {
		for key, value := range saved {
			if value == "" {
				os.Unsetenv(key)
			} else {
				os.Setenv(key, value)
			}
		}
	}()
	// clearEnv removes the override variables so one subtest cannot leak
	// state into the next (e.g. PORT=9090 from the overrides subtest would
	// otherwise break the defaults assertions below).
	clearEnv := func() {
		for _, key := range envKeys {
			os.Unsetenv(key)
		}
	}

	t.Run("LoadWithValidConfigFile", func(t *testing.T) {
		clearEnv()
		tempDir := t.TempDir()
		configPath := filepath.Join(tempDir, "config.yaml")
		// Indentation matters here: keys must nest under their sections.
		configContent := `
server:
  port: 8080
  timeouts:
    read: 5m
    write: 5m
    idle: 5m
providers:
  anthropic:
    base_url: "https://api.anthropic.com"
    version: "2023-06-01"
    max_retries: 3
  openai:
    base_url: "https://api.openai.com"
storage:
  db_path: "test.db"
subagents:
  mappings:
    test-agent: "gpt-4"
`
		if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil {
			t.Fatalf("Failed to write config file: %v", err)
		}
		os.Setenv("CONFIG_PATH", configPath)

		cfg, err := Load()
		if err != nil {
			t.Fatalf("Failed to load config: %v", err)
		}
		if cfg.Server.Port != "8080" {
			t.Errorf("Expected port 8080, got %s", cfg.Server.Port)
		}
		if cfg.Anthropic.BaseURL != "https://api.anthropic.com" {
			t.Errorf("Expected Anthropic URL https://api.anthropic.com, got %s", cfg.Anthropic.BaseURL)
		}
		if cfg.Storage.DBPath != "test.db" {
			t.Errorf("Expected DB path test.db, got %s", cfg.Storage.DBPath)
		}
		if cfg.Subagents.Mappings["test-agent"] != "gpt-4" {
			t.Errorf("Expected subagent mapping test-agent: gpt-4, got %s", cfg.Subagents.Mappings["test-agent"])
		}
	})

	t.Run("LoadWithDefaults", func(t *testing.T) {
		clearEnv()
		// Point CONFIG_PATH at a file that does not exist so only the
		// built-in defaults apply.
		tempDir := t.TempDir()
		os.Setenv("CONFIG_PATH", filepath.Join(tempDir, "nonexistent.yaml"))

		cfg, err := Load()
		if err != nil {
			t.Fatalf("Failed to load config with defaults: %v", err)
		}
		if cfg.Server.Port != "3001" {
			t.Errorf("Expected default port 3001, got %s", cfg.Server.Port)
		}
		if cfg.Server.ReadTimeout != 10*time.Minute {
			t.Errorf("Expected default read timeout 10m, got %v", cfg.Server.ReadTimeout)
		}
		if cfg.Anthropic.BaseURL != "https://api.anthropic.com" {
			t.Errorf("Expected default Anthropic URL, got %s", cfg.Anthropic.BaseURL)
		}
		if cfg.Storage.DBPath != "requests.db" {
			t.Errorf("Expected default DB path requests.db, got %s", cfg.Storage.DBPath)
		}
	})

	t.Run("EnvironmentVariableOverrides", func(t *testing.T) {
		clearEnv()
		tempDir := t.TempDir()
		configPath := filepath.Join(tempDir, "config.yaml")
		configContent := `
server:
  port: 8080
providers:
  anthropic:
    base_url: "https://api.anthropic.com"
  openai:
    api_key: "file-key"
`
		if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil {
			t.Fatalf("Failed to write config file: %v", err)
		}
		os.Setenv("CONFIG_PATH", configPath)
		os.Setenv("PORT", "9090")
		os.Setenv("ANTHROPIC_FORWARD_URL", "https://custom.anthropic.com")
		os.Setenv("OPENAI_API_KEY", "env-key")

		cfg, err := Load()
		if err != nil {
			t.Fatalf("Failed to load config: %v", err)
		}
		// These assertions pin the documented contract (env vars override the
		// YAML file, per config.yaml.example).
		if cfg.Server.Port != "9090" {
			t.Errorf("Expected port override 9090, got %s", cfg.Server.Port)
		}
		if cfg.Anthropic.BaseURL != "https://custom.anthropic.com" {
			t.Errorf("Expected Anthropic URL override, got %s", cfg.Anthropic.BaseURL)
		}
		// Fix: Config has no top-level OpenAI field; the key lives under
		// Providers.OpenAI (the old cfg.OpenAI.APIKey did not compile).
		if cfg.Providers.OpenAI.APIKey != "env-key" {
			t.Errorf("Expected OpenAI API key override, got %s", cfg.Providers.OpenAI.APIKey)
		}
	})

	t.Run("InvalidYAML", func(t *testing.T) {
		clearEnv()
		tempDir := t.TempDir()
		configPath := filepath.Join(tempDir, "invalid.yaml")
		configContent := `
server:
  port: [this is invalid
`
		if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil {
			t.Fatalf("Failed to write config file: %v", err)
		}
		os.Setenv("CONFIG_PATH", configPath)

		// Load logs the YAML error but still returns the defaults.
		cfg, err := Load()
		if err != nil {
			t.Fatalf("Expected config to load with defaults despite invalid YAML: %v", err)
		}
		if cfg.Server.Port != "3001" {
			t.Errorf("Expected default port 3001 after invalid YAML, got %s", cfg.Server.Port)
		}
	})
}
// TestConfig_ParseTimeouts verifies the duration-parsing rule Load applies to
// the server timeout strings: a valid Go duration string overrides the
// default, while an empty or malformed string leaves the 10-minute default in
// place. The previous version of this test iterated its table without
// asserting anything.
func TestConfig_ParseTimeouts(t *testing.T) {
	const defaultTimeout = 10 * time.Minute

	tests := []struct {
		name       string
		timeoutStr string
		expected   time.Duration
	}{
		{"Valid minutes", "5m", 5 * time.Minute},
		{"Valid seconds", "30s", 30 * time.Second},
		{"Valid hours", "2h", 2 * time.Hour},
		{"Empty string", "", defaultTimeout},          // empty means "not configured"
		{"Invalid format", "invalid", defaultTimeout}, // parse errors are ignored
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Mirror the logic in Load: only a non-empty, parseable
			// duration string replaces the default timeout.
			got := defaultTimeout
			if tt.timeoutStr != "" {
				if d, err := time.ParseDuration(tt.timeoutStr); err == nil {
					got = d
				}
			}
			if got != tt.expected {
				t.Errorf("timeout %q: got %v, want %v", tt.timeoutStr, got, tt.expected)
			}
		})
	}
}

View file

@ -25,15 +25,19 @@ type Handler struct {
anthropicService service.AnthropicService anthropicService service.AnthropicService
storageService service.StorageService storageService service.StorageService
conversationService service.ConversationService conversationService service.ConversationService
modelRouter *service.ModelRouter
logger *log.Logger
} }
func New(anthropicService service.AnthropicService, storageService service.StorageService, logger *log.Logger) *Handler { func New(anthropicService service.AnthropicService, storageService service.StorageService, logger *log.Logger, modelRouter *service.ModelRouter) *Handler {
conversationService := service.NewConversationService() conversationService := service.NewConversationService()
return &Handler{ return &Handler{
anthropicService: anthropicService, anthropicService: anthropicService,
storageService: storageService, storageService: storageService,
conversationService: conversationService, conversationService: conversationService,
modelRouter: modelRouter,
logger: logger,
} }
} }
@ -81,10 +85,22 @@ func (h *Handler) Messages(w http.ResponseWriter, r *http.Request) {
log.Printf("❌ Error saving request: %v", err) log.Printf("❌ Error saving request: %v", err)
} }
// Forward the request to Anthropic // Use model router to determine provider and route the request
resp, err := h.anthropicService.ForwardRequest(r.Context(), r) provider, originalModel, err := h.modelRouter.RouteRequest(&req)
if err != nil { if err != nil {
log.Printf("❌ Error forwarding to Anthropic API: %v", err) log.Printf("❌ Error routing request: %v", err)
writeErrorResponse(w, "Failed to route request", http.StatusInternalServerError)
return
}
// Update request log with original model (for tracking)
requestLog.OriginalModel = originalModel
requestLog.RoutedModel = req.Model
// Forward the request to the selected provider
resp, err := provider.ForwardRequest(r.Context(), r)
if err != nil {
log.Printf("❌ Error forwarding to %s API: %v", provider.Name(), err)
writeErrorResponse(w, "Failed to forward request", http.StatusInternalServerError) writeErrorResponse(w, "Failed to forward request", http.StatusInternalServerError)
return return
} }

View file

@ -0,0 +1,287 @@
package handler
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"net/http/httptest"
"testing"
"github.com/gorilla/mux"
"github.com/seifghazi/claude-code-monitor/internal/config"
"github.com/seifghazi/claude-code-monitor/internal/model"
)
// MockStorageService is an in-memory test double for the storage service.
// Writes are recorded in SavedRequests, reads are served from
// RequestsToReturn, and every call fails with ReturnError when it is set.
type MockStorageService struct {
	SavedRequests    []model.RequestLog
	ReturnError      error
	RequestsToReturn []model.RequestLog
	TotalRequests    int
}

// SaveRequest records the request and returns a fixed id.
func (m *MockStorageService) SaveRequest(request *model.RequestLog) (string, error) {
	if err := m.ReturnError; err != nil {
		return "", err
	}
	m.SavedRequests = append(m.SavedRequests, *request)
	return "test-id-123", nil
}

// GetRequests serves the canned request page and total count.
func (m *MockStorageService) GetRequests(page, limit int) ([]model.RequestLog, int, error) {
	if err := m.ReturnError; err != nil {
		return nil, 0, err
	}
	return m.RequestsToReturn, m.TotalRequests, nil
}

// ClearRequests drops everything recorded so far and reports how many
// entries were removed.
func (m *MockStorageService) ClearRequests() (int, error) {
	if err := m.ReturnError; err != nil {
		return 0, err
	}
	removed := len(m.SavedRequests)
	m.SavedRequests = nil
	return removed, nil
}

// UpdateRequestWithGrading is a no-op apart from the configured error.
func (m *MockStorageService) UpdateRequestWithGrading(requestID string, grade *model.PromptGrade) error {
	return m.ReturnError
}

// UpdateRequestWithResponse is a no-op apart from the configured error.
func (m *MockStorageService) UpdateRequestWithResponse(request *model.RequestLog) error {
	return m.ReturnError
}

// EnsureDirectoryExists always succeeds.
func (m *MockStorageService) EnsureDirectoryExists() error {
	return nil
}

// GetRequestByShortID returns the first canned request with a fixed full id,
// or all-nil when none are configured.
func (m *MockStorageService) GetRequestByShortID(shortID string) (*model.RequestLog, string, error) {
	if err := m.ReturnError; err != nil {
		return nil, "", err
	}
	if len(m.RequestsToReturn) == 0 {
		return nil, "", nil
	}
	return &m.RequestsToReturn[0], "full-id", nil
}

// GetConfig reports a fixed test database path.
func (m *MockStorageService) GetConfig() *config.StorageConfig {
	return &config.StorageConfig{
		DBPath: "test.db",
	}
}

// GetAllRequests returns pointers to every canned request; the mock ignores
// modelFilter.
func (m *MockStorageService) GetAllRequests(modelFilter string) ([]*model.RequestLog, error) {
	if err := m.ReturnError; err != nil {
		return nil, err
	}
	out := make([]*model.RequestLog, 0, len(m.RequestsToReturn))
	for i := range m.RequestsToReturn {
		out = append(out, &m.RequestsToReturn[i])
	}
	return out, nil
}
// MockAnthropicService is a test double for the Anthropic service. It
// captures the forwarded request and replies with the configured error or
// response, falling back to a canned 200 response.
type MockAnthropicService struct {
	ReturnResponse  *http.Response
	ReturnError     error
	ReceivedRequest *http.Request
}

// ForwardRequest records the request and returns the configured outcome.
func (m *MockAnthropicService) ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error) {
	m.ReceivedRequest = originalReq
	switch {
	case m.ReturnError != nil:
		return nil, m.ReturnError
	case m.ReturnResponse != nil:
		return m.ReturnResponse, nil
	}
	// Default canned success response.
	return &http.Response{
		StatusCode: http.StatusOK,
		Body:       io.NopCloser(bytes.NewBufferString(`{"id":"test","content":[{"text":"Hello"}]}`)),
		Header:     make(http.Header),
	}, nil
}
// TestHealthEndpoint verifies GET /health responds 200 with status "healthy".
func TestHealthEndpoint(t *testing.T) {
	mockStorage := &MockStorageService{}
	mockAnthropic := &MockAnthropicService{}
	// Fix: New takes (anthropic, storage, logger, modelRouter); the old
	// 3-argument call did not compile. The health check touches neither the
	// logger nor the router, so nil is fine for both.
	handler := New(mockAnthropic, mockStorage, nil, nil)

	req, err := http.NewRequest("GET", "/health", nil)
	if err != nil {
		t.Fatal(err)
	}
	rr := httptest.NewRecorder()

	// Route through mux exactly as production does.
	router := mux.NewRouter()
	router.HandleFunc("/health", handler.Health).Methods("GET")
	router.ServeHTTP(rr, req)

	if status := rr.Code; status != http.StatusOK {
		t.Errorf("Handler returned wrong status code: got %v want %v", status, http.StatusOK)
	}

	var response map[string]interface{}
	if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil {
		t.Errorf("Failed to parse response body: %v", err)
	}
	if response["status"] != "healthy" {
		t.Errorf("Expected status 'healthy', got %v", response["status"])
	}
}
// TestGetRequestsEndpoint verifies GET /api/requests returns the stored
// requests plus pagination metadata from the storage service.
func TestGetRequestsEndpoint(t *testing.T) {
	mockStorage := &MockStorageService{
		RequestsToReturn: []model.RequestLog{
			{
				ID:       "test-1",
				Method:   "POST",
				Endpoint: "/v1/messages",
				Model:    "claude-3-opus",
			},
			{
				ID:       "test-2",
				Method:   "POST",
				Endpoint: "/v1/messages",
				Model:    "claude-3-sonnet",
			},
		},
		TotalRequests: 2,
	}
	mockAnthropic := &MockAnthropicService{}
	// Fix: New takes (anthropic, storage, logger, modelRouter); the old
	// 3-argument call did not compile. Neither logger nor router is used by
	// this endpoint, so nil is fine for both.
	handler := New(mockAnthropic, mockStorage, nil, nil)

	req, err := http.NewRequest("GET", "/api/requests?page=1&limit=10", nil)
	if err != nil {
		t.Fatal(err)
	}
	rr := httptest.NewRecorder()

	router := mux.NewRouter()
	router.HandleFunc("/api/requests", handler.GetRequests).Methods("GET")
	router.ServeHTTP(rr, req)

	if status := rr.Code; status != http.StatusOK {
		t.Errorf("Handler returned wrong status code: got %v want %v", status, http.StatusOK)
	}

	var response struct {
		Requests []model.RequestLog `json:"requests"`
		Total    int                `json:"total"`
		Page     int                `json:"page"`
		Limit    int                `json:"limit"`
	}
	if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil {
		t.Errorf("Failed to parse response body: %v", err)
	}
	if len(response.Requests) != 2 {
		t.Errorf("Expected 2 requests, got %d", len(response.Requests))
	}
	if response.Total != 2 {
		t.Errorf("Expected total 2, got %d", response.Total)
	}
}
// TestChatCompletionsEndpoint verifies that the OpenAI-style endpoint is
// rejected with a 400 pointing callers at /v1/messages instead.
func TestChatCompletionsEndpoint(t *testing.T) {
	mockStorage := &MockStorageService{}
	mockAnthropic := &MockAnthropicService{}
	// Fix: New takes (anthropic, storage, logger, modelRouter); the old
	// 3-argument call did not compile. nil is fine for the last two here.
	handler := New(mockAnthropic, mockStorage, nil, nil)

	req, err := http.NewRequest("POST", "/v1/chat/completions", bytes.NewBufferString(`{"model":"gpt-4"}`))
	if err != nil {
		t.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	rr := httptest.NewRecorder()

	// Call the handler directly; no router is needed for a single route.
	handler.ChatCompletions(rr, req)

	if status := rr.Code; status != http.StatusBadRequest {
		t.Errorf("Handler returned wrong status code: got %v want %v", status, http.StatusBadRequest)
	}

	var response map[string]interface{}
	if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil {
		t.Errorf("Failed to parse response body: %v", err)
	}
	expectedError := "This is an Anthropic proxy. Please use the /v1/messages endpoint instead of /v1/chat/completions"
	if response["error"] != expectedError {
		t.Errorf("Expected error message '%s', got %v", expectedError, response["error"])
	}
}
// TestDeleteRequestsEndpoint verifies DELETE /api/requests clears storage and
// reports the number of deleted entries.
func TestDeleteRequestsEndpoint(t *testing.T) {
	mockStorage := &MockStorageService{
		SavedRequests: []model.RequestLog{
			{ID: "test-1"},
			{ID: "test-2"},
		},
	}
	mockAnthropic := &MockAnthropicService{}
	// Fix: New takes (anthropic, storage, logger, modelRouter); the old
	// 3-argument call did not compile. nil is fine for the last two here.
	handler := New(mockAnthropic, mockStorage, nil, nil)

	req, err := http.NewRequest("DELETE", "/api/requests", nil)
	if err != nil {
		t.Fatal(err)
	}
	rr := httptest.NewRecorder()

	router := mux.NewRouter()
	router.HandleFunc("/api/requests", handler.DeleteRequests).Methods("DELETE")
	router.ServeHTTP(rr, req)

	if status := rr.Code; status != http.StatusOK {
		t.Errorf("Handler returned wrong status code: got %v want %v", status, http.StatusOK)
	}

	var response map[string]interface{}
	if err := json.Unmarshal(rr.Body.Bytes(), &response); err != nil {
		t.Errorf("Failed to parse response body: %v", err)
	}
	// encoding/json decodes numbers into float64 for interface{} targets.
	if response["deleted"] != float64(2) {
		t.Errorf("Expected 2 deleted requests, got %v", response["deleted"])
	}
}

View file

@ -25,17 +25,19 @@ type CriteriaScore struct {
} }
type RequestLog struct { type RequestLog struct {
RequestID string `json:"requestId"` RequestID string `json:"requestId"`
Timestamp string `json:"timestamp"` Timestamp string `json:"timestamp"`
Method string `json:"method"` Method string `json:"method"`
Endpoint string `json:"endpoint"` Endpoint string `json:"endpoint"`
Headers map[string][]string `json:"headers"` Headers map[string][]string `json:"headers"`
Body interface{} `json:"body"` Body interface{} `json:"body"`
Model string `json:"model,omitempty"` Model string `json:"model,omitempty"`
UserAgent string `json:"userAgent"` OriginalModel string `json:"originalModel,omitempty"`
ContentType string `json:"contentType"` RoutedModel string `json:"routedModel,omitempty"`
PromptGrade *PromptGrade `json:"promptGrade,omitempty"` UserAgent string `json:"userAgent"`
Response *ResponseLog `json:"response,omitempty"` ContentType string `json:"contentType"`
PromptGrade *PromptGrade `json:"promptGrade,omitempty"`
Response *ResponseLog `json:"response,omitempty"`
} }
type ResponseLog struct { type ResponseLog struct {

View file

@ -0,0 +1,131 @@
package provider
import (
"compress/gzip"
"context"
"fmt"
"io"
"net/http"
"net/url"
"path"
"strings"
"time"
"github.com/seifghazi/claude-code-monitor/internal/config"
)
// AnthropicProvider forwards proxy traffic to the Anthropic API.
type AnthropicProvider struct {
	client *http.Client
	config *config.AnthropicProviderConfig
}

// NewAnthropicProvider builds an AnthropicProvider whose HTTP client gives
// upstream calls five minutes before timing out.
func NewAnthropicProvider(cfg *config.AnthropicProviderConfig) Provider {
	httpClient := &http.Client{Timeout: 5 * time.Minute}
	return &AnthropicProvider{
		client: httpClient,
		config: cfg,
	}
}

// Name identifies this provider in logs and routing tables.
func (p *AnthropicProvider) Name() string {
	return "anthropic"
}
// ForwardRequest proxies an incoming HTTP request to the configured Anthropic
// base URL and returns the upstream response, with any gzip content encoding
// already unwrapped so callers read plain bytes.
func (p *AnthropicProvider) ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error) {
	// Clone the request to avoid modifying the original
	proxyReq := originalReq.Clone(ctx)

	// Parse the configured base URL
	baseURL, err := url.Parse(p.config.BaseURL)
	if err != nil {
		return nil, fmt.Errorf("failed to parse base URL '%s': %w", p.config.BaseURL, err)
	}
	if baseURL.Scheme == "" || baseURL.Host == "" {
		return nil, fmt.Errorf("invalid base URL, scheme and host are required: %s", p.config.BaseURL)
	}

	// Update the destination URL; path.Join keeps any path prefix from the
	// base URL ahead of the original request path.
	proxyReq.URL.Scheme = baseURL.Scheme
	proxyReq.URL.Host = baseURL.Host
	proxyReq.URL.Path = path.Join(baseURL.Path, originalReq.URL.Path)

	// Preserve query parameters
	proxyReq.URL.RawQuery = originalReq.URL.RawQuery

	// RequestURI must be empty on an outgoing client request, and Host must
	// name the upstream target.
	proxyReq.RequestURI = ""
	proxyReq.Host = baseURL.Host

	// Remove hop-by-hop headers
	removeHopByHopHeaders(proxyReq.Header)

	// Add required headers if not present: default the anthropic-version
	// header from configuration.
	if proxyReq.Header.Get("anthropic-version") == "" {
		proxyReq.Header.Set("anthropic-version", p.config.Version)
	}

	// Ask for gzip so large responses travel compressed; they are unwrapped
	// below before being handed back to the caller.
	proxyReq.Header.Set("Accept-Encoding", "gzip")

	// Forward the request
	resp, err := p.client.Do(proxyReq)
	if err != nil {
		return nil, fmt.Errorf("failed to forward request: %w", err)
	}

	// Handle gzip-encoded responses: drop the encoding headers (the body is
	// no longer compressed from the caller's perspective) and wrap the body
	// so reads decompress and Close releases both readers.
	if resp.Header.Get("Content-Encoding") == "gzip" {
		resp.Header.Del("Content-Encoding")
		resp.Header.Del("Content-Length")
		gzipReader, err := gzip.NewReader(resp.Body)
		if err != nil {
			resp.Body.Close()
			return nil, fmt.Errorf("failed to create gzip reader: %w", err)
		}
		resp.Body = &gzipResponseBody{
			Reader: gzipReader,
			closer: resp.Body,
		}
	}

	return resp, nil
}
type gzipResponseBody struct {
io.Reader
closer io.Closer
}
func (g *gzipResponseBody) Close() error {
if gzReader, ok := g.Reader.(*gzip.Reader); ok {
gzReader.Close()
}
return g.closer.Close()
}
func removeHopByHopHeaders(header http.Header) {
hopByHopHeaders := []string{
"Connection",
"Keep-Alive",
"Proxy-Authenticate",
"Proxy-Authorization",
"TE",
"Trailers",
"Transfer-Encoding",
"Upgrade",
}
for _, h := range hopByHopHeaders {
header.Del(h)
}
// Remove any headers specified in the Connection header
if connection := header.Get("Connection"); connection != "" {
for _, h := range strings.Split(connection, ",") {
header.Del(strings.TrimSpace(h))
}
header.Del("Connection")
}
}

View file

@ -0,0 +1,200 @@
package provider
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"time"
"github.com/seifghazi/claude-code-monitor/internal/config"
"github.com/seifghazi/claude-code-monitor/internal/model"
)
// OpenAIProvider forwards proxy traffic to the OpenAI API.
type OpenAIProvider struct {
	client *http.Client
	config *config.OpenAIProviderConfig
}

// NewOpenAIProvider builds an OpenAIProvider whose HTTP client gives
// upstream calls five minutes before timing out.
func NewOpenAIProvider(cfg *config.OpenAIProviderConfig) Provider {
	httpClient := &http.Client{Timeout: 5 * time.Minute}
	return &OpenAIProvider{
		client: httpClient,
		config: cfg,
	}
}

// Name identifies this provider in logs and routing tables.
func (p *OpenAIProvider) Name() string {
	return "openai"
}
// ForwardRequest translates an Anthropic-format request into the OpenAI
// chat-completions format, sends it to the configured base URL, and converts
// the response back into Anthropic format (streamed or buffered). The
// original request's body is restored after reading so callers can re-read
// it (e.g. for logging).
func (p *OpenAIProvider) ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error) {
	// First, we need to convert the Anthropic request to OpenAI format.
	bodyBytes, err := io.ReadAll(originalReq.Body)
	if err != nil {
		return nil, fmt.Errorf("failed to read request body: %w", err)
	}
	// Restore the consumed body so the original request stays readable.
	originalReq.Body = io.NopCloser(bytes.NewReader(bodyBytes))

	var anthropicReq model.AnthropicRequest
	if err := json.Unmarshal(bodyBytes, &anthropicReq); err != nil {
		return nil, fmt.Errorf("failed to parse anthropic request: %w", err)
	}

	// Convert to OpenAI format and re-serialize as the outgoing payload.
	openAIReq := convertAnthropicToOpenAI(&anthropicReq)
	newBodyBytes, err := json.Marshal(openAIReq)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal openai request: %w", err)
	}

	// Clone the request with the translated body.
	proxyReq := originalReq.Clone(ctx)
	proxyReq.Body = io.NopCloser(bytes.NewReader(newBodyBytes))
	proxyReq.ContentLength = int64(len(newBodyBytes))

	// Parse the configured base URL so the clone can be retargeted.
	baseURL, err := url.Parse(p.config.BaseURL)
	if err != nil {
		return nil, fmt.Errorf("failed to parse base URL '%s': %w", p.config.BaseURL, err)
	}

	// Update the destination URL for OpenAI.
	proxyReq.URL.Scheme = baseURL.Scheme
	proxyReq.URL.Host = baseURL.Host
	proxyReq.URL.Path = "/v1/chat/completions" // OpenAI endpoint

	// RequestURI must be empty on outgoing client requests.
	proxyReq.RequestURI = ""
	proxyReq.Host = baseURL.Host

	// Remove Anthropic-specific headers.
	proxyReq.Header.Del("anthropic-version")
	proxyReq.Header.Del("x-api-key")

	// Add OpenAI auth and content headers.
	if p.config.APIKey != "" {
		proxyReq.Header.Set("Authorization", "Bearer "+p.config.APIKey)
	}
	proxyReq.Header.Set("Content-Type", "application/json")

	// Forward the request upstream.
	resp, err := p.client.Do(proxyReq)
	if err != nil {
		return nil, fmt.Errorf("failed to forward request: %w", err)
	}

	// For streaming responses, we need to convert back to Anthropic format.
	if anthropicReq.Stream {
		// Pipe the upstream body through the stream transformer; the
		// goroutine closes the write end (and, inside the transformer,
		// the upstream body) when the copy finishes.
		pr, pw := io.Pipe()
		go func() {
			defer pw.Close()
			transformOpenAIStreamToAnthropic(resp.Body, pw)
		}()
		// Replace the response body with our transformed stream.
		resp.Body = pr
	} else {
		// For non-streaming, read the full body and convert the response.
		respBody, err := io.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			return nil, fmt.Errorf("failed to read response body: %w", err)
		}
		// Convert OpenAI response back to Anthropic format and fix up
		// Content-Length to match the rewritten payload.
		transformedBody := transformOpenAIResponseToAnthropic(respBody)
		resp.Body = io.NopCloser(bytes.NewReader(transformedBody))
		resp.ContentLength = int64(len(transformedBody))
		resp.Header.Set("Content-Length", fmt.Sprintf("%d", len(transformedBody)))
	}

	return resp, nil
}
// convertAnthropicToOpenAI translates an Anthropic Messages request into an
// OpenAI chat-completions payload. System messages become "system" role
// entries. Fix: each conversation message previously kept only its FIRST
// content block, truncating multi-block messages; all text blocks are now
// concatenated (newline-separated).
// NOTE(review): non-text blocks (tool use, images) are still dropped —
// confirm whether tool calls need translating for the target models.
func convertAnthropicToOpenAI(req *model.AnthropicRequest) map[string]interface{} {
	messages := make([]map[string]interface{}, 0, len(req.System)+len(req.Messages))

	// Add system messages.
	for _, sysMsg := range req.System {
		messages = append(messages, map[string]interface{}{
			"role":    "system",
			"content": sysMsg.Text,
		})
	}

	// Add conversation messages, preserving every text block.
	for _, msg := range req.Messages {
		var content string
		for _, block := range msg.GetContentBlocks() {
			if block.Text == "" {
				continue // skip blocks that carry no text payload
			}
			if content != "" {
				content += "\n"
			}
			content += block.Text
		}
		messages = append(messages, map[string]interface{}{
			"role":    msg.Role,
			"content": content,
		})
	}

	return map[string]interface{}{
		"model":       req.Model,
		"messages":    messages,
		"temperature": req.Temperature,
		"max_tokens":  req.MaxTokens,
		"stream":      req.Stream,
	}
}
func transformOpenAIResponseToAnthropic(respBody []byte) []byte {
// This is a simplified transformation
// In production, you'd want to handle all fields properly
var openAIResp map[string]interface{}
if err := json.Unmarshal(respBody, &openAIResp); err != nil {
return respBody // Return as-is if we can't parse
}
// Extract the assistant's message
content := ""
if choices, ok := openAIResp["choices"].([]interface{}); ok && len(choices) > 0 {
if choice, ok := choices[0].(map[string]interface{}); ok {
if msg, ok := choice["message"].(map[string]interface{}); ok {
if c, ok := msg["content"].(string); ok {
content = c
}
}
}
}
// Build Anthropic-style response
anthropicResp := map[string]interface{}{
"id": openAIResp["id"],
"type": "message",
"role": "assistant",
"content": []map[string]string{{"type": "text", "text": content}},
"model": openAIResp["model"],
"usage": openAIResp["usage"],
}
result, _ := json.Marshal(anthropicResp)
return result
}
func transformOpenAIStreamToAnthropic(openAIStream io.ReadCloser, anthropicStream io.Writer) {
defer openAIStream.Close()
// This is a placeholder - in production you'd parse SSE events
// and transform them from OpenAI format to Anthropic format
io.Copy(anthropicStream, openAIStream)
}

View file

@ -0,0 +1,15 @@
package provider

import (
	"context"
	"net/http"
)

// Provider is the interface that all LLM providers must implement.
type Provider interface {
	// Name returns the provider name (e.g., "anthropic", "openai").
	Name() string

	// ForwardRequest forwards a request to the provider's API and returns
	// the upstream response. Implementations may rewrite the request and
	// response bodies to translate between provider wire formats.
	ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error)
}

View file

@ -0,0 +1,165 @@
package service
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"log"
"os"
"strings"
"github.com/seifghazi/claude-code-monitor/internal/config"
"github.com/seifghazi/claude-code-monitor/internal/model"
"github.com/seifghazi/claude-code-monitor/internal/provider"
)
// ModelRouter decides which provider and model should serve each request,
// rerouting recognized Claude Code subagents to an alternate model per the
// configured subagent mappings.
type ModelRouter struct {
	config             *config.Config
	providers          map[string]provider.Provider
	subagentMappings   map[string]string             // agentName -> targetModel
	customAgentPrompts map[string]SubagentDefinition // promptHash -> definition
	logger             *log.Logger
}
// SubagentDefinition describes a custom agent loaded from a
// .claude/agents/<name>.md file; it is indexed by the hash of the static
// portion of its system prompt.
type SubagentDefinition struct {
	Name           string // agent name (the markdown file's base name)
	TargetModel    string // model this agent's requests are rerouted to
	TargetProvider string // provider name derived from TargetModel
	FullPrompt     string // Store for debugging
}
// NewModelRouter builds a router over the given providers, seeds the
// subagent mappings from configuration, and eagerly indexes any custom
// agent definition files found on disk.
func NewModelRouter(cfg *config.Config, providers map[string]provider.Provider, logger *log.Logger) *ModelRouter {
	r := &ModelRouter{
		config:             cfg,
		providers:          providers,
		subagentMappings:   cfg.Subagents.Mappings,
		customAgentPrompts: map[string]SubagentDefinition{},
		logger:             logger,
	}
	r.loadCustomAgents()
	return r
}
// extractStaticPrompt returns the portion of a system prompt before its
// "\nNotes:" section (trimmed), or the whole trimmed prompt when no such
// section exists. The static portion is what gets hashed for subagent
// matching, since the Notes section varies per request.
// Fix: the original's second lookup for "\n\nNotes:" was dead code — any
// string containing "\n\nNotes:" necessarily also contains "\nNotes:".
func (r *ModelRouter) extractStaticPrompt(systemPrompt string) string {
	if notesIndex := strings.Index(systemPrompt, "\nNotes:"); notesIndex != -1 {
		// Return only the part before "Notes:".
		return strings.TrimSpace(systemPrompt[:notesIndex])
	}
	// No "Notes:" section; return the whole prompt.
	return strings.TrimSpace(systemPrompt)
}
// loadCustomAgents resolves each configured subagent mapping to an agent
// definition file (.claude/agents/<name>.md, project level then user level),
// extracts the static portion of its system prompt, and indexes it by hash
// for request-time matching in RouteRequest.
func (r *ModelRouter) loadCustomAgents() {
	for agentName, targetModel := range r.subagentMappings {
		// Try loading from project level first, then user level.
		paths := []string{
			fmt.Sprintf(".claude/agents/%s.md", agentName),
			fmt.Sprintf("%s/.claude/agents/%s.md", os.Getenv("HOME"), agentName),
		}

		for _, path := range paths {
			content, err := os.ReadFile(path)
			if err != nil {
				continue // not present at this level; try the next path
			}

			// Parse agent file: metadata, a "---" separator line, then the
			// system prompt.
			parts := strings.Split(string(content), "\n---\n")
			if len(parts) >= 2 {
				systemPrompt := strings.TrimSpace(parts[1])

				// Extract only the static part (before "Notes:" if it exists)
				// so runtime-varying notes don't break hash matching.
				staticPrompt := r.extractStaticPrompt(systemPrompt)
				hash := r.hashString(staticPrompt)

				// Determine provider for the target model.
				providerName := r.getProviderNameForModel(targetModel)

				r.customAgentPrompts[hash] = SubagentDefinition{
					Name:           agentName,
					TargetModel:    targetModel,
					TargetProvider: providerName,
					FullPrompt:     staticPrompt,
				}

				r.logger.Printf("Loaded custom agent: %s (hash: %s) -> %s",
					agentName, hash, targetModel)
				break
			}
		}
	}
}
// RouteRequest determines which provider and model to use for a request.
// It returns the chosen provider and the ORIGINAL model name; when a known
// subagent prompt is detected, req.Model is rewritten in place to the
// mapped target model.
// Fix: the local variable previously named `provider` shadowed the imported
// provider package; renamed to avoid the shadowing.
func (r *ModelRouter) RouteRequest(req *model.AnthropicRequest) (provider.Provider, string, error) {
	originalModel := req.Model

	// Claude Code pattern: exactly two system messages, the first being the
	// fixed "You are Claude Code..." preamble. The second is either the
	// regular CLI prompt or a subagent prompt (which may carry a dynamic
	// "Notes:" section).
	if len(req.System) == 2 && strings.Contains(req.System[0].Text, "You are Claude Code") {
		// Hash only the static portion so dynamic notes don't break matching.
		staticPrompt := r.extractStaticPrompt(req.System[1].Text)
		promptHash := r.hashString(staticPrompt)

		if definition, exists := r.customAgentPrompts[promptHash]; exists {
			r.logger.Printf("Subagent '%s' detected -> routing to %s",
				definition.Name, definition.TargetModel)
			req.Model = definition.TargetModel

			p := r.providers[definition.TargetProvider]
			if p == nil {
				return nil, originalModel, fmt.Errorf("provider %s not found for model %s",
					definition.TargetProvider, definition.TargetModel)
			}
			return p, originalModel, nil
		}

		// This is a regular Claude Code request (not a known subagent).
		r.logger.Printf("Regular Claude Code request detected, using original model %s", originalModel)
	}

	// Default: use the original model and its natural provider.
	p := r.providers[r.getProviderNameForModel(originalModel)]
	if p == nil {
		return nil, originalModel, fmt.Errorf("no provider found for model %s", originalModel)
	}
	return p, originalModel, nil
}
// hashString returns a compact 16-hex-character fingerprint (truncated
// SHA-256) of s, used to index static subagent prompts.
func (r *ModelRouter) hashString(s string) string {
	digest := sha256.Sum256([]byte(s))
	return hex.EncodeToString(digest[:])[:16]
}
// getProviderNameForModel maps a model identifier to the provider that
// serves it: "claude*" -> anthropic, "gpt*" -> openai, anything else
// defaults to anthropic.
// Fix: the parameter was previously named `model`, shadowing the imported
// model package within this function.
func (r *ModelRouter) getProviderNameForModel(modelName string) string {
	switch {
	case strings.HasPrefix(modelName, "claude"):
		return "anthropic"
	case strings.HasPrefix(modelName, "gpt"):
		return "openai"
	default:
		// Default to anthropic for unrecognized model families.
		return "anthropic"
	}
}

View file

@ -0,0 +1,141 @@
package service
import (
"log"
"os"
"testing"
"github.com/seifghazi/claude-code-monitor/internal/config"
"github.com/seifghazi/claude-code-monitor/internal/model"
"github.com/seifghazi/claude-code-monitor/internal/provider"
)
// TestModelRouter_EdgeCases walks representative request shapes through the
// router's prompt handling. NOTE(review): the providers map holds nil
// entries, so full routing is never executed here — only extractStaticPrompt
// is actually asserted; expectedRoute/expectedModel currently serve as
// documentation rather than checked expectations.
func TestModelRouter_EdgeCases(t *testing.T) {
	// Setup: a single subagent mapping, mirroring config.yaml.
	cfg := &config.Config{
		Subagents: config.SubagentsConfig{
			Mappings: map[string]string{
				"streaming-systems-engineer": "gpt-4o",
			},
		},
	}

	providers := make(map[string]provider.Provider)
	// Mock providers - in real test you'd use mocks
	providers["anthropic"] = nil
	providers["openai"] = nil

	logger := log.New(os.Stdout, "test: ", log.LstdFlags)
	router := NewModelRouter(cfg, providers, logger)

	tests := []struct {
		name          string
		request       *model.AnthropicRequest
		expectedRoute string // documented expectation (not asserted; see NOTE above)
		expectedModel string // documented expectation (not asserted)
		description   string
	}{
		{
			name: "Regular Claude Code request (no Notes section)",
			request: &model.AnthropicRequest{
				Model: "claude-3-opus-20240229",
				System: []model.AnthropicSystemMessage{
					{Text: "You are Claude Code, Anthropic's official CLI for Claude."},
					{Text: "You are an interactive CLI tool that helps users with software engineering tasks. Use the instructions below and the tools available to you to assist the user."},
				},
			},
			expectedRoute: "anthropic",
			expectedModel: "claude-3-opus-20240229",
			description:   "Regular Claude Code requests should use original model",
		},
		{
			name: "Non-Claude Code request",
			request: &model.AnthropicRequest{
				Model: "claude-3-opus-20240229",
				System: []model.AnthropicSystemMessage{
					{Text: "You are a helpful assistant."},
				},
			},
			expectedRoute: "anthropic",
			expectedModel: "claude-3-opus-20240229",
			description:   "Non-Claude Code requests should use original model",
		},
		{
			name: "Single system message",
			request: &model.AnthropicRequest{
				Model:  "claude-3-opus-20240229",
				System: []model.AnthropicSystemMessage{},
			},
			expectedRoute: "anthropic",
			expectedModel: "claude-3-opus-20240229",
			description:   "Requests with no system messages should use original model",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Since we can't test with real providers, we'll just test the logic
			// by checking that extractStaticPrompt works correctly.
			if len(tt.request.System) == 2 {
				// Test extract static prompt for the second system message.
				fullPrompt := tt.request.System[1].Text
				staticPrompt := router.extractStaticPrompt(fullPrompt)

				// Verify no "Notes:" in static prompt.
				if contains(staticPrompt, "Notes:") {
					t.Errorf("Static prompt should not contain 'Notes:' section")
				}
			}

			// Log for manual verification
			t.Logf("Test case: %s", tt.description)
		})
	}
}
// TestModelRouter_ExtractStaticPrompt verifies that extractStaticPrompt
// strips the trailing "Notes:" section (the per-request dynamic part of a
// subagent prompt) and trims whitespace, leaving prompts without such a
// section unchanged.
func TestModelRouter_ExtractStaticPrompt(t *testing.T) {
	// A zero-value router suffices: extractStaticPrompt reads no fields.
	router := &ModelRouter{}

	tests := []struct {
		name     string
		input    string
		expected string
	}{
		{
			name:     "Prompt with Notes section",
			input:    "You are an expert engineer.\n\nNotes:\n- Some dynamic content\n- More notes",
			expected: "You are an expert engineer.",
		},
		{
			name:     "Prompt without Notes section",
			input:    "You are an expert engineer.\nNo notes here.",
			expected: "You are an expert engineer.\nNo notes here.",
		},
		{
			name:     "Prompt with double newline before Notes",
			input:    "You are an expert.\n\nNotes:\nDynamic content",
			expected: "You are an expert.",
		},
		{
			name:     "Empty prompt",
			input:    "",
			expected: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := router.extractStaticPrompt(tt.input)
			if result != tt.expected {
				t.Errorf("extractStaticPrompt() = %q, want %q", result, tt.expected)
			}
		})
	}
}
// contains reports whether substr occurs within s.
// Fix: the previous implementation recursed once per character of s
// (risking deep stacks on long prompts) inside a convoluted boolean
// expression; this iterative scan is equivalent and straightforward.
// (strings.Contains would be ideal, but this file does not import strings.)
func contains(s, substr string) bool {
	if len(substr) == 0 {
		return true // the empty string is a substring of everything
	}
	for i := 0; i+len(substr) <= len(s); i++ {
		if s[i:i+len(substr)] == substr {
			return true
		}
	}
	return false
}

136
proxy/test_e2e.sh Normal file
View file

@ -0,0 +1,136 @@
#!/bin/bash

# End-to-End test script for LLM Proxy
# This script starts the server, runs basic tests, and cleans up

# Abort on the first failing command.
set -e

echo "🧪 Starting End-to-End Tests for LLM Proxy"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Test configuration — a dedicated port/db/config so a dev server running on
# the default port 3001 is unaffected.
TEST_PORT=3002
TEST_DB="test_requests.db"
TEST_CONFIG="test_config.yaml"
# Cleanup function: kill the background server (if it was started) and
# remove test artifacts. Invoked via the EXIT trap on success and failure.
cleanup() {
    echo "🧹 Cleaning up..."
    # Fixes: quote expansions and use -n instead of ! -z (robust if the
    # variables ever contain spaces; also the idiomatic test form).
    if [ -n "$SERVER_PID" ]; then
        kill "$SERVER_PID" 2>/dev/null || true
    fi
    rm -f "$TEST_DB" "$TEST_CONFIG"
}
# Set trap to cleanup on exit (runs on success, failure, and interrupt).
trap cleanup EXIT

# Create test configuration
echo "📝 Creating test configuration..."
cat > $TEST_CONFIG << EOF
server:
  port: $TEST_PORT
  timeouts:
    read: 1m
    write: 1m
    idle: 1m

providers:
  anthropic:
    base_url: "https://api.anthropic.com"
    version: "2023-06-01"
    max_retries: 1

storage:
  db_path: "$TEST_DB"
EOF

# Build the proxy
echo "🔨 Building proxy..."
cd proxy && go build -o ../bin/test-proxy cmd/proxy/main.go && cd ..

# Start the server in the background and remember its PID for cleanup.
echo "🚀 Starting test server on port $TEST_PORT..."
CONFIG_PATH=$TEST_CONFIG PORT=$TEST_PORT ./bin/test-proxy &
SERVER_PID=$!

# Wait for server to start
# NOTE(review): a fixed sleep is racy; polling /health in a loop would be
# more reliable on slow machines.
echo "⏳ Waiting for server to start..."
sleep 3
# Function to check an endpoint's HTTP status code.
# Usage: check_response <endpoint> <expected_status> <test_name>
# Prints a pass/fail line; returns non-zero on mismatch.
check_response() {
    local endpoint=$1
    local expected_status=$2
    local test_name=$3

    response=$(curl -s -w "\n%{http_code}" "http://localhost:$TEST_PORT$endpoint")
    status_code=$(echo "$response" | tail -n 1)
    # Portability fix: BSD/macOS head(1) rejects "head -n -1"; sed '$d'
    # drops the trailing status-code line on both GNU and BSD systems.
    body=$(echo "$response" | sed '$d')

    if [ "$status_code" = "$expected_status" ]; then
        echo -e "${GREEN}${NC} $test_name: Status $status_code"
        return 0
    else
        echo -e "${RED}${NC} $test_name: Expected $expected_status, got $status_code"
        echo "Response body: $body"
        return 1
    fi
}
# Run tests
# NOTE(review): the ${GREEN}${NC} / ${RED}${NC} echoes below appear to be
# missing the ✓ / ✗ glyphs between the color codes — confirm against the
# original file's encoding.
echo ""
echo "🧪 Running tests..."
echo ""

# Test 1: Health check
check_response "/health" "200" "Health check"

# Test 2: Get requests (should be empty initially)
response=$(curl -s http://localhost:$TEST_PORT/api/requests)
if echo "$response" | grep -q '"requests":\[\]'; then
    echo -e "${GREEN}${NC} Get requests: Returns empty array initially"
else
    echo -e "${RED}${NC} Get requests: Expected empty array"
    echo "Response: $response"
fi

# Test 3: Models endpoint
check_response "/v1/models" "200" "Models endpoint"

# Test 4: Invalid endpoint
check_response "/invalid" "404" "404 for invalid endpoint"

# Test 5: Chat completions endpoint (should return helpful error)
response=$(curl -s -X POST -H "Content-Type: application/json" \
    -d '{"model":"gpt-4","messages":[]}' \
    http://localhost:$TEST_PORT/v1/chat/completions)
if echo "$response" | grep -q "This is an Anthropic proxy"; then
    echo -e "${GREEN}${NC} Chat completions: Returns helpful error message"
else
    echo -e "${RED}${NC} Chat completions: Expected Anthropic proxy error"
    echo "Response: $response"
fi

# Test 6: Delete requests
response=$(curl -s -X DELETE http://localhost:$TEST_PORT/api/requests)
if echo "$response" | grep -q '"deleted":0'; then
    echo -e "${GREEN}${NC} Delete requests: Works with empty database"
else
    echo -e "${RED}${NC} Delete requests: Expected deletion count"
    echo "Response: $response"
fi

# Test 7: Conversations endpoint
check_response "/api/conversations" "200" "Conversations endpoint"

echo ""
echo "🎉 End-to-End tests completed!"
echo ""

# Cleanup is handled by trap