Ready
This commit is contained in:
commit
ae71ec4f72
48 changed files with 21032 additions and 0 deletions
106
proxy/cmd/proxy/main.go
Normal file
106
proxy/cmd/proxy/main.go
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/handlers"
|
||||
"github.com/gorilla/mux"
|
||||
|
||||
"github.com/seifghazi/claude-code-monitor/internal/config"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/handler"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/middleware"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/service"
|
||||
)
|
||||
|
||||
func main() {
|
||||
logger := log.New(os.Stdout, "proxy: ", log.LstdFlags|log.Lshortfile)
|
||||
|
||||
cfg, err := config.Load()
|
||||
if err != nil {
|
||||
logger.Fatalf("❌ Failed to load configuration: %v", err)
|
||||
}
|
||||
|
||||
anthropicService := service.NewAnthropicService(&cfg.Anthropic)
|
||||
|
||||
// Use SQLite storage
|
||||
storageService, err := service.NewSQLiteStorageService(&cfg.Storage)
|
||||
if err != nil {
|
||||
logger.Fatalf("❌ Failed to initialize SQLite storage: %v", err)
|
||||
}
|
||||
logger.Println("🗿 SQLite database ready")
|
||||
|
||||
h := handler.New(anthropicService, storageService, logger)
|
||||
|
||||
r := mux.NewRouter()
|
||||
|
||||
corsHandler := handlers.CORS(
|
||||
handlers.AllowedOrigins([]string{"*"}),
|
||||
handlers.AllowedMethods([]string{"GET", "POST", "PUT", "DELETE", "OPTIONS"}),
|
||||
handlers.AllowedHeaders([]string{"*"}),
|
||||
)
|
||||
|
||||
r.Use(middleware.Logging)
|
||||
|
||||
r.HandleFunc("/v1/chat/completions", h.ChatCompletions).Methods("POST")
|
||||
r.HandleFunc("/v1/messages", h.Messages).Methods("POST")
|
||||
r.HandleFunc("/v1/models", h.Models).Methods("GET")
|
||||
r.HandleFunc("/health", h.Health).Methods("GET")
|
||||
|
||||
r.HandleFunc("/", h.UI).Methods("GET")
|
||||
r.HandleFunc("/ui", h.UI).Methods("GET")
|
||||
r.HandleFunc("/api/requests", h.GetRequests).Methods("GET")
|
||||
r.HandleFunc("/api/requests", h.DeleteRequests).Methods("DELETE")
|
||||
r.HandleFunc("/api/conversations", h.GetConversations).Methods("GET")
|
||||
r.HandleFunc("/api/conversations/{id}", h.GetConversationByID).Methods("GET")
|
||||
r.HandleFunc("/api/conversations/project", h.GetConversationsByProject).Methods("GET")
|
||||
|
||||
r.NotFoundHandler = http.HandlerFunc(h.NotFound)
|
||||
|
||||
srv := &http.Server{
|
||||
Addr: ":" + cfg.Server.Port,
|
||||
Handler: corsHandler(r),
|
||||
ReadTimeout: cfg.Server.ReadTimeout,
|
||||
WriteTimeout: cfg.Server.WriteTimeout,
|
||||
IdleTimeout: cfg.Server.IdleTimeout,
|
||||
}
|
||||
|
||||
go func() {
|
||||
logger.Printf("🚀 Claude Code Monitor Server running on http://localhost:%s", cfg.Server.Port)
|
||||
logger.Printf("📡 API endpoints available at:")
|
||||
logger.Printf(" - POST http://localhost:%s/v1/chat/completions (OpenAI format)", cfg.Server.Port)
|
||||
logger.Printf(" - POST http://localhost:%s/v1/messages (Anthropic format)", cfg.Server.Port)
|
||||
logger.Printf(" - GET http://localhost:%s/v1/models", cfg.Server.Port)
|
||||
logger.Printf(" - GET http://localhost:%s/health", cfg.Server.Port)
|
||||
logger.Printf(" - POST http://localhost:%s/api/grade-prompt (Prompt grading)", cfg.Server.Port)
|
||||
logger.Printf("🎨 Web UI available at:")
|
||||
logger.Printf(" - GET http://localhost:%s/ (Request Visualizer)", cfg.Server.Port)
|
||||
logger.Printf(" - GET http://localhost:%s/api/requests (Request API)", cfg.Server.Port)
|
||||
logger.Printf("🔍 All requests logged with comprehensive error handling")
|
||||
logger.Printf("🎯 Auto prompt grading with Anthropic best practices")
|
||||
|
||||
if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
logger.Fatalf("❌ Server failed to start: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
quit := make(chan os.Signal, 1)
|
||||
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-quit
|
||||
|
||||
logger.Println("🛑 Shutting down server...")
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if err := srv.Shutdown(ctx); err != nil {
|
||||
logger.Fatalf("❌ Server forced to shutdown: %v", err)
|
||||
}
|
||||
|
||||
logger.Println("✅ Server exited")
|
||||
}
|
||||
13
proxy/go.mod
Normal file
13
proxy/go.mod
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
module github.com/seifghazi/claude-code-monitor
|
||||
|
||||
go 1.20
|
||||
|
||||
require (
|
||||
github.com/gorilla/handlers v1.5.2
|
||||
github.com/gorilla/mux v1.8.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/felixge/httpsnoop v1.0.3 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.28 // indirect
|
||||
)
|
||||
8
proxy/go.sum
Normal file
8
proxy/go.sum
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
|
||||
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
|
||||
github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A=
|
||||
github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
87
proxy/internal/config/config.go
Normal file
87
proxy/internal/config/config.go
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Server ServerConfig
|
||||
Anthropic AnthropicConfig
|
||||
Storage StorageConfig
|
||||
}
|
||||
|
||||
type ServerConfig struct {
|
||||
Port string
|
||||
ReadTimeout time.Duration
|
||||
WriteTimeout time.Duration
|
||||
IdleTimeout time.Duration
|
||||
}
|
||||
|
||||
type AnthropicConfig struct {
|
||||
BaseURL string
|
||||
Version string
|
||||
MaxRetries int
|
||||
}
|
||||
|
||||
type StorageConfig struct {
|
||||
RequestsDir string
|
||||
DBPath string
|
||||
}
|
||||
|
||||
func Load() (*Config, error) {
|
||||
cfg := &Config{
|
||||
Server: ServerConfig{
|
||||
Port: getEnv("PORT", "3001"),
|
||||
ReadTimeout: getDuration("READ_TIMEOUT", 500*time.Second),
|
||||
WriteTimeout: getDuration("WRITE_TIMEOUT", 500*time.Second),
|
||||
IdleTimeout: getDuration("IDLE_TIMEOUT", 500*time.Second),
|
||||
},
|
||||
Anthropic: AnthropicConfig{
|
||||
BaseURL: getEnv("ANTHROPIC_FORWARD_URL", "https://api.anthropic.com"),
|
||||
Version: getEnv("ANTHROPIC_VERSION", "2023-06-01"),
|
||||
MaxRetries: getInt("ANTHROPIC_MAX_RETRIES", 3),
|
||||
},
|
||||
Storage: StorageConfig{
|
||||
DBPath: getEnv("DB_PATH", "requests.db"),
|
||||
},
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func getEnv(key, defaultValue string) string {
|
||||
if value := os.Getenv(key); value != "" {
|
||||
return value
|
||||
}
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
func getDuration(key string, defaultValue time.Duration) time.Duration {
|
||||
value := os.Getenv(key)
|
||||
if value == "" {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
duration, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
return duration
|
||||
}
|
||||
|
||||
func getInt(key string, defaultValue int) int {
|
||||
value := os.Getenv(key)
|
||||
if value == "" {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
intValue, err := strconv.Atoi(value)
|
||||
if err != nil {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
return intValue
|
||||
}
|
||||
689
proxy/internal/handler/handlers.go
Normal file
689
proxy/internal/handler/handlers.go
Normal file
|
|
@ -0,0 +1,689 @@
|
|||
package handler
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/rand"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
|
||||
"github.com/seifghazi/claude-code-monitor/internal/model"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/service"
|
||||
)
|
||||
|
||||
type Handler struct {
|
||||
anthropicService service.AnthropicService
|
||||
storageService service.StorageService
|
||||
conversationService service.ConversationService
|
||||
}
|
||||
|
||||
func New(anthropicService service.AnthropicService, storageService service.StorageService, logger *log.Logger) *Handler {
|
||||
conversationService := service.NewConversationService()
|
||||
|
||||
return &Handler{
|
||||
anthropicService: anthropicService,
|
||||
storageService: storageService,
|
||||
conversationService: conversationService,
|
||||
}
|
||||
}
|
||||
|
||||
// ChatCompletions handles OpenAI-style chat completion requests. The
// request is validated and persisted for monitoring, but it is NOT
// forwarded upstream: the handler returns a hard-coded stub completion
// (see the Choices/Usage literals below).
func (h *Handler) ChatCompletions(w http.ResponseWriter, r *http.Request) {
	log.Println("🤖 Chat completion request received (OpenAI format)")

	// The logging middleware buffers the body and stashes it in the
	// request context; nil means it is absent there.
	bodyBytes := getBodyBytes(r)
	if bodyBytes == nil {
		http.Error(w, "Error reading request body", http.StatusBadRequest)
		return
	}

	var req model.ChatCompletionRequest
	if err := json.Unmarshal(bodyBytes, &req); err != nil {
		log.Printf("❌ Error parsing JSON: %v", err)
		writeErrorResponse(w, "Invalid JSON", http.StatusBadRequest)
		return
	}

	requestID := generateRequestID()
	startTime := time.Now()

	// Persist the inbound request before answering so it appears in the
	// monitor even if response handling fails later.
	requestLog := &model.RequestLog{
		RequestID:   requestID,
		Timestamp:   time.Now().Format(time.RFC3339),
		Method:      r.Method,
		Endpoint:    "/v1/chat/completions",
		Headers:     SanitizeHeaders(r.Header), // credentials redacted before storage
		Body:        req,
		Model:       req.Model,
		UserAgent:   r.Header.Get("User-Agent"),
		ContentType: r.Header.Get("Content-Type"),
	}

	// Storage failures are logged but do not fail the client request.
	if _, err := h.storageService.SaveRequest(requestLog); err != nil {
		log.Printf("❌ Error saving request: %v", err)
	}

	// Canned response: the message text and token counts below are fixed
	// placeholders, not real model output.
	response := &model.ChatCompletionResponse{
		ID:      fmt.Sprintf("chatcmpl-%d", time.Now().UnixNano()),
		Object:  "chat.completion",
		Created: time.Now().Unix(),
		Model:   req.Model,
		Choices: []model.Choice{
			{
				Index: 0,
				Message: model.ChatMessage{
					Role:    "assistant",
					Content: "Hello! This is a test response from the refactored proxy server.",
				},
				FinishReason: "stop",
			},
		},
		Usage: model.Usage{
			PromptTokens:     10,
			CompletionTokens: 20,
			TotalTokens:      30,
		},
	}

	// Default the echoed model name when the caller omitted it.
	if req.Model == "" {
		response.Model = "claude-3-sonnet"
	}

	responseLog := &model.ResponseLog{
		StatusCode:   http.StatusOK,
		Headers:      SanitizeHeaders(w.Header()), // headers written so far (typically empty at this point)
		Body:         response,
		ResponseTime: time.Since(startTime).Milliseconds(),
		IsStreaming:  false,
	}

	// The requestLog object has the conversation details.
	// We need to set the response on it and then save the update.
	requestLog.Response = responseLog
	if err := h.storageService.UpdateRequestWithResponse(requestLog); err != nil {
		log.Printf("❌ Error updating request with response: %v", err)
	}

	writeJSONResponse(w, response)
}
|
||||
|
||||
// Messages proxies Anthropic-format /v1/messages requests upstream using
// the caller's own API key. The exchange (request and response) is
// persisted for monitoring; streaming responses are relayed
// chunk-by-chunk by handleStreamingResponse.
func (h *Handler) Messages(w http.ResponseWriter, r *http.Request) {
	log.Println("🤖 Messages request received (Anthropic format)")

	// Body was buffered by the logging middleware and stored in the
	// request context; nil means it is absent there.
	bodyBytes := getBodyBytes(r)
	if bodyBytes == nil {
		http.Error(w, "Error reading request body", http.StatusBadRequest)
		return
	}

	var req model.AnthropicRequest
	if err := json.Unmarshal(bodyBytes, &req); err != nil {
		log.Printf("❌ Error parsing JSON: %v", err)
		writeErrorResponse(w, "Invalid JSON", http.StatusBadRequest)
		return
	}

	// Extract API key from incoming request headers; the proxy holds no
	// credentials of its own and forwards the caller's key upstream.
	apiKey := r.Header.Get("x-api-key")
	if apiKey == "" {
		// Also check for X-Api-Key (capitalized version).
		// NOTE(review): http.Header.Get canonicalizes header names, so
		// this second lookup can never return anything the first did not —
		// candidate for removal.
		apiKey = r.Header.Get("X-Api-Key")
	}
	if apiKey == "" {
		log.Println("❌ No API key provided in request headers")
		writeErrorResponse(w, "API key required in x-api-key header", http.StatusUnauthorized)
		return
	}

	requestID := generateRequestID()
	startTime := time.Now()

	// Create request log — persisted before forwarding so failed upstream
	// calls are still visible in the monitor.
	requestLog := &model.RequestLog{
		RequestID:   requestID,
		Timestamp:   time.Now().Format(time.RFC3339),
		Method:      r.Method,
		Endpoint:    "/v1/messages",
		Headers:     SanitizeHeaders(r.Header), // credentials redacted before storage
		Body:        req,
		Model:       req.Model,
		UserAgent:   r.Header.Get("User-Agent"),
		ContentType: r.Header.Get("Content-Type"),
	}

	if _, err := h.storageService.SaveRequest(requestLog); err != nil {
		log.Printf("❌ Error saving request: %v", err)
	}

	// Forward the request to Anthropic
	resp, err := h.anthropicService.ForwardRequest(r.Context(), &req, apiKey)
	if err != nil {
		log.Printf("❌ Error forwarding to Anthropic API: %v", err)
		writeErrorResponse(w, "Failed to forward request", http.StatusInternalServerError)
		return
	}
	defer resp.Body.Close()

	// Streaming and buffered responses are relayed and logged by
	// dedicated helpers.
	if req.Stream {
		h.handleStreamingResponse(w, resp, requestLog, startTime)
		return
	}

	h.handleNonStreamingResponse(w, resp, requestLog, startTime)
}
|
||||
|
||||
func (h *Handler) Models(w http.ResponseWriter, r *http.Request) {
|
||||
log.Println("📋 Models list requested")
|
||||
|
||||
response := &model.ModelsResponse{
|
||||
Object: "list",
|
||||
Data: []model.ModelInfo{
|
||||
{
|
||||
ID: "claude-3-sonnet-20240229",
|
||||
Object: "model",
|
||||
Created: 1677610602,
|
||||
OwnedBy: "anthropic",
|
||||
},
|
||||
{
|
||||
ID: "claude-3-opus-20240229",
|
||||
Object: "model",
|
||||
Created: 1677610602,
|
||||
OwnedBy: "anthropic",
|
||||
},
|
||||
{
|
||||
ID: "claude-3-haiku-20240307",
|
||||
Object: "model",
|
||||
Created: 1677610602,
|
||||
OwnedBy: "anthropic",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
writeJSONResponse(w, response)
|
||||
}
|
||||
|
||||
func (h *Handler) Health(w http.ResponseWriter, r *http.Request) {
|
||||
response := &model.HealthResponse{
|
||||
Status: "healthy",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
|
||||
writeJSONResponse(w, response)
|
||||
}
|
||||
|
||||
func (h *Handler) UI(w http.ResponseWriter, r *http.Request) {
|
||||
htmlContent, err := os.ReadFile("index.html")
|
||||
if err != nil {
|
||||
log.Printf("❌ Error reading index.html: %v", err)
|
||||
http.Error(w, "UI not available", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "text/html")
|
||||
w.Write(htmlContent)
|
||||
}
|
||||
|
||||
// GetRequests serves the monitoring API: it returns stored request logs,
// optionally filtered by ?model=, paginated via ?page= and ?limit=, as
// {"requests": [...], "total": N} where total counts all filtered rows.
func (h *Handler) GetRequests(w http.ResponseWriter, r *http.Request) {
	// Malformed numeric parameters parse to 0 and fall back to defaults.
	page, _ := strconv.Atoi(r.URL.Query().Get("page"))
	if page < 1 {
		page = 1
	}

	limit, _ := strconv.Atoi(r.URL.Query().Get("limit"))
	if limit <= 0 {
		limit = 10 // Default limit
	}

	// Get model filter from query parameters
	modelFilter := r.URL.Query().Get("model")
	if modelFilter == "" {
		modelFilter = "all"
	}

	log.Printf("📊 GetRequests called - page: %d, limit: %d, modelFilter: %s", page, limit, modelFilter)

	// Get all requests with model filter applied at storage level.
	// NOTE(review): the full (filtered) history is loaded into memory and
	// paginated here; fine for small tables, worth pushing LIMIT/OFFSET
	// into the storage query if the history grows large.
	allRequests, err := h.storageService.GetAllRequests(modelFilter)
	if err != nil {
		log.Printf("Error getting requests: %v", err)
		http.Error(w, "Failed to get requests", http.StatusInternalServerError)
		return
	}

	log.Printf("📊 Got %d requests from storage (filter: %s)", len(allRequests), modelFilter)

	// Convert pointers to values for consistency; nil entries become zero values.
	requests := make([]model.RequestLog, len(allRequests))
	for i, req := range allRequests {
		if req != nil {
			requests[i] = *req
		}
	}

	// Calculate total before pagination
	total := len(requests)

	// Apply pagination; pages past the end yield an empty slice.
	start := (page - 1) * limit
	end := start + limit
	if start >= len(requests) {
		requests = []model.RequestLog{}
	} else {
		if end > len(requests) {
			end = len(requests)
		}
		requests = requests[start:end]
	}

	log.Printf("📊 Returning %d requests after pagination", len(requests))

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(struct {
		Requests []model.RequestLog `json:"requests"`
		Total    int                `json:"total"`
	}{
		Requests: requests,
		Total:    total,
	})
}
|
||||
|
||||
func (h *Handler) DeleteRequests(w http.ResponseWriter, r *http.Request) {
|
||||
log.Println("🗑️ Clearing request history")
|
||||
|
||||
clearedCount, err := h.storageService.ClearRequests()
|
||||
if err != nil {
|
||||
log.Printf("❌ Error clearing requests: %v", err)
|
||||
writeErrorResponse(w, "Error clearing request history", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("✅ Deleted %d request files", clearedCount)
|
||||
|
||||
response := map[string]interface{}{
|
||||
"message": "Request history cleared",
|
||||
"deleted": clearedCount,
|
||||
}
|
||||
|
||||
writeJSONResponse(w, response)
|
||||
}
|
||||
|
||||
// NotFound is the router's fallback handler; it returns a JSON 404 body
// instead of the default plain-text response.
func (h *Handler) NotFound(w http.ResponseWriter, r *http.Request) {
	writeErrorResponse(w, "Not found", http.StatusNotFound)
}
|
||||
|
||||
func (h *Handler) handleStreamingResponse(w http.ResponseWriter, resp *http.Response, requestLog *model.RequestLog, startTime time.Time) {
|
||||
log.Println("🌊 Streaming response detected, forwarding stream...")
|
||||
|
||||
w.Header().Set("Content-Type", "text/event-stream")
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
w.Header().Set("Connection", "keep-alive")
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
log.Printf("❌ Anthropic API error: %d", resp.StatusCode)
|
||||
errorBytes, _ := io.ReadAll(resp.Body)
|
||||
log.Printf("Error details: %s", string(errorBytes))
|
||||
|
||||
responseLog := &model.ResponseLog{
|
||||
StatusCode: resp.StatusCode,
|
||||
Headers: SanitizeHeaders(resp.Header),
|
||||
BodyText: string(errorBytes),
|
||||
ResponseTime: time.Since(startTime).Milliseconds(),
|
||||
IsStreaming: true,
|
||||
CompletedAt: time.Now().Format(time.RFC3339),
|
||||
}
|
||||
|
||||
requestLog.Response = responseLog
|
||||
if err := h.storageService.UpdateRequestWithResponse(requestLog); err != nil {
|
||||
log.Printf("❌ Error updating request with error response: %v", err)
|
||||
}
|
||||
|
||||
w.WriteHeader(resp.StatusCode)
|
||||
w.Write(errorBytes)
|
||||
return
|
||||
}
|
||||
|
||||
var fullResponseText strings.Builder
|
||||
var toolCalls []model.ContentBlock
|
||||
var streamingChunks []string
|
||||
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
if line == "" || !strings.HasPrefix(line, "data:") {
|
||||
continue
|
||||
}
|
||||
|
||||
streamingChunks = append(streamingChunks, line)
|
||||
fmt.Fprintf(w, "%s\n\n", line)
|
||||
if f, ok := w.(http.Flusher); ok {
|
||||
f.Flush()
|
||||
}
|
||||
|
||||
jsonData := strings.TrimPrefix(line, "data: ")
|
||||
var event model.StreamingEvent
|
||||
if err := json.Unmarshal([]byte(jsonData), &event); err != nil {
|
||||
log.Printf("⚠️ Error unmarshalling streaming event: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
switch event.Type {
|
||||
case "content_block_delta":
|
||||
if event.Delta != nil {
|
||||
if event.Delta.Type == "text_delta" {
|
||||
fullResponseText.WriteString(event.Delta.Text)
|
||||
} else if event.Delta.Type == "input_json_delta" {
|
||||
if event.Index != nil && *event.Index < len(toolCalls) {
|
||||
toolCalls[*event.Index].Input = append(toolCalls[*event.Index].Input, event.Delta.Input...)
|
||||
}
|
||||
}
|
||||
}
|
||||
case "content_block_start":
|
||||
if event.ContentBlock != nil && event.ContentBlock.Type == "tool_use" {
|
||||
toolCalls = append(toolCalls, *event.ContentBlock)
|
||||
}
|
||||
case "message_stop":
|
||||
// End of stream
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
responseLog := &model.ResponseLog{
|
||||
StatusCode: resp.StatusCode,
|
||||
Headers: SanitizeHeaders(resp.Header),
|
||||
StreamingChunks: streamingChunks,
|
||||
ResponseTime: time.Since(startTime).Milliseconds(),
|
||||
IsStreaming: true,
|
||||
CompletedAt: time.Now().Format(time.RFC3339),
|
||||
}
|
||||
|
||||
// Create a structured body for the log
|
||||
var responseBody model.AnthropicMessage
|
||||
responseBody.Role = "assistant"
|
||||
var contentBlocks []model.ContentBlock
|
||||
if fullResponseText.Len() > 0 {
|
||||
contentBlocks = append(contentBlocks, model.ContentBlock{
|
||||
Type: "text",
|
||||
Text: fullResponseText.String(),
|
||||
})
|
||||
}
|
||||
contentBlocks = append(contentBlocks, toolCalls...)
|
||||
responseBody.Content = contentBlocks
|
||||
responseLog.Body = responseBody
|
||||
|
||||
requestLog.Response = responseLog
|
||||
if err := h.storageService.UpdateRequestWithResponse(requestLog); err != nil {
|
||||
log.Printf("❌ Error updating request with streaming response: %v", err)
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
log.Printf("❌ Streaming error: %v", err)
|
||||
} else {
|
||||
log.Println("✅ Streaming response completed")
|
||||
}
|
||||
}
|
||||
|
||||
func (h *Handler) handleNonStreamingResponse(w http.ResponseWriter, resp *http.Response, requestLog *model.RequestLog, startTime time.Time) {
|
||||
responseBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Printf("❌ Error reading Anthropic response: %v", err)
|
||||
writeErrorResponse(w, "Failed to read response", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
responseLog := &model.ResponseLog{
|
||||
StatusCode: resp.StatusCode,
|
||||
Headers: SanitizeHeaders(resp.Header),
|
||||
BodyText: string(responseBytes),
|
||||
ResponseTime: time.Since(startTime).Milliseconds(),
|
||||
IsStreaming: false,
|
||||
CompletedAt: time.Now().Format(time.RFC3339),
|
||||
}
|
||||
|
||||
// Try to parse as JSON for structured logging
|
||||
if resp.Header.Get("Content-Type") == "application/json" {
|
||||
var jsonBody interface{}
|
||||
if json.Unmarshal(responseBytes, &jsonBody) == nil {
|
||||
responseLog.Body = jsonBody
|
||||
}
|
||||
}
|
||||
|
||||
requestLog.Response = responseLog
|
||||
if err := h.storageService.UpdateRequestWithResponse(requestLog); err != nil {
|
||||
log.Printf("❌ Error updating request with response: %v", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
log.Printf("❌ Anthropic API error: %d %s", resp.StatusCode, string(responseBytes))
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(resp.StatusCode)
|
||||
w.Write(responseBytes)
|
||||
return
|
||||
}
|
||||
|
||||
log.Println("✅ Successfully forwarded request to Anthropic API")
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Write(responseBytes)
|
||||
}
|
||||
|
||||
func generateRequestID() string {
|
||||
bytes := make([]byte, 8)
|
||||
rand.Read(bytes)
|
||||
return hex.EncodeToString(bytes)
|
||||
}
|
||||
|
||||
func getBodyBytes(r *http.Request) []byte {
|
||||
if bodyBytes, ok := r.Context().Value(model.BodyBytesKey).([]byte); ok {
|
||||
return bodyBytes
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeJSONResponse(w http.ResponseWriter, data interface{}) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if err := json.NewEncoder(w).Encode(data); err != nil {
|
||||
log.Printf("❌ Error encoding JSON response: %v", err)
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
// writeErrorResponse sends a JSON error body ({"error": message}) with
// the given HTTP status code. The Encode error is ignored: the status
// line has already been written, so nothing useful can be done here.
func writeErrorResponse(w http.ResponseWriter, message string, statusCode int) {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(statusCode)
	json.NewEncoder(w).Encode(&model.ErrorResponse{Error: message})
}
|
||||
|
||||
// extractTextFromMessage tries multiple strategies to extract text from a message
|
||||
func extractTextFromMessage(message json.RawMessage) string {
|
||||
// Strategy 1: Direct string (simple text message)
|
||||
var directString string
|
||||
if err := json.Unmarshal(message, &directString); err == nil && directString != "" {
|
||||
return directString
|
||||
}
|
||||
|
||||
// Strategy 2: Array format [{"type": "text", "text": "..."}]
|
||||
var msgArray []interface{}
|
||||
if err := json.Unmarshal(message, &msgArray); err == nil {
|
||||
for _, item := range msgArray {
|
||||
if itemMap, ok := item.(map[string]interface{}); ok {
|
||||
if itemMap["type"] == "text" {
|
||||
if text, ok := itemMap["text"].(string); ok && text != "" {
|
||||
return text
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Strategy 3: Content object format {"content": [{"type": "text", "text": "..."}]}
|
||||
var msgContent map[string]interface{}
|
||||
if err := json.Unmarshal(message, &msgContent); err == nil {
|
||||
if content, ok := msgContent["content"]; ok {
|
||||
if contentArray, ok := content.([]interface{}); ok {
|
||||
for _, block := range contentArray {
|
||||
if blockMap, ok := block.(map[string]interface{}); ok {
|
||||
if blockMap["type"] == "text" {
|
||||
if text, ok := blockMap["text"].(string); ok && text != "" {
|
||||
return text
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also check if content is a string directly
|
||||
if contentStr, ok := msgContent["content"].(string); ok && contentStr != "" {
|
||||
return contentStr
|
||||
}
|
||||
}
|
||||
|
||||
// Strategy 4: Single object with text field {"type": "text", "text": "..."}
|
||||
var singleObj map[string]interface{}
|
||||
if err := json.Unmarshal(message, &singleObj); err == nil {
|
||||
if singleObj["type"] == "text" {
|
||||
if text, ok := singleObj["text"].(string); ok && text != "" {
|
||||
return text
|
||||
}
|
||||
}
|
||||
|
||||
// Also check for content field at top level
|
||||
if text, ok := singleObj["content"].(string); ok && text != "" {
|
||||
return text
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
// Conversation handlers
|
||||
|
||||
// GetConversations lists conversations discovered in the local Claude
// projects, flattened across all projects into one array, sorted by last
// activity (newest first), and paginated via ?page= / ?limit=.
func (h *Handler) GetConversations(w http.ResponseWriter, r *http.Request) {
	log.Println("📚 Getting conversations from Claude projects")

	conversations, err := h.conversationService.GetConversations()
	if err != nil {
		log.Printf("❌ Error getting conversations: %v", err)
		writeErrorResponse(w, "Failed to get conversations", http.StatusInternalServerError)
		return
	}

	// Flatten all conversations into a single array for the UI
	var allConversations []map[string]interface{}
	for _, convs := range conversations {
		for _, conv := range convs {
			// Extract first user message from the conversation to use as a
			// preview line (truncated to 200 characters).
			var firstMessage string
			for _, msg := range conv.Messages {
				if msg.Type == "user" {
					// Try multiple parsing strategies
					text := extractTextFromMessage(msg.Message)
					if text != "" {
						firstMessage = text
						if len(firstMessage) > 200 {
							// NOTE(review): byte-based slicing — may split a
							// multi-byte UTF-8 character at the cut point.
							firstMessage = firstMessage[:200] + "..."
						}
						break
					}
				}
			}

			allConversations = append(allConversations, map[string]interface{}{
				"id":           conv.SessionID,
				"requestCount": conv.MessageCount,
				"startTime":    conv.StartTime.Format(time.RFC3339),
				"lastActivity": conv.EndTime.Format(time.RFC3339),
				"duration":     conv.EndTime.Sub(conv.StartTime).Milliseconds(),
				"firstMessage": firstMessage,
				"projectName":  conv.ProjectName,
			})
		}
	}

	// Sort by last activity (newest first)
	sort.Slice(allConversations, func(i, j int) bool {
		t1, _ := time.Parse(time.RFC3339, allConversations[i]["lastActivity"].(string))
		t2, _ := time.Parse(time.RFC3339, allConversations[j]["lastActivity"].(string))
		return t1.After(t2)
	})

	// Apply pagination; malformed values fall back to page 1 / limit 10.
	page, _ := strconv.Atoi(r.URL.Query().Get("page"))
	if page < 1 {
		page = 1
	}
	limit, _ := strconv.Atoi(r.URL.Query().Get("limit"))
	if limit <= 0 {
		limit = 10
	}

	start := (page - 1) * limit
	end := start + limit
	if start > len(allConversations) {
		allConversations = []map[string]interface{}{}
	} else {
		if end > len(allConversations) {
			end = len(allConversations)
		}
		allConversations = allConversations[start:end]
	}

	response := map[string]interface{}{
		"conversations": allConversations,
	}

	writeJSONResponse(w, response)
}
|
||||
|
||||
func (h *Handler) GetConversationByID(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
sessionID, ok := vars["id"]
|
||||
if !ok {
|
||||
http.Error(w, "Session ID is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
projectPath := r.URL.Query().Get("project")
|
||||
if projectPath == "" {
|
||||
http.Error(w, "Project path is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("📖 Getting conversation %s from project %s", sessionID, projectPath)
|
||||
|
||||
conversation, err := h.conversationService.GetConversation(projectPath, sessionID)
|
||||
if err != nil {
|
||||
log.Printf("❌ Error getting conversation: %v", err)
|
||||
http.Error(w, "Conversation not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
writeJSONResponse(w, conversation)
|
||||
}
|
||||
|
||||
func (h *Handler) GetConversationsByProject(w http.ResponseWriter, r *http.Request) {
|
||||
projectPath := r.URL.Query().Get("project")
|
||||
if projectPath == "" {
|
||||
http.Error(w, "Project path is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("📁 Getting conversations for project %s", projectPath)
|
||||
|
||||
conversations, err := h.conversationService.GetConversationsByProject(projectPath)
|
||||
if err != nil {
|
||||
log.Printf("❌ Error getting project conversations: %v", err)
|
||||
writeErrorResponse(w, "Failed to get project conversations", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
writeJSONResponse(w, conversations)
|
||||
}
|
||||
265
proxy/internal/handler/utils.go
Normal file
265
proxy/internal/handler/utils.go
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
package handler
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/seifghazi/claude-code-monitor/internal/model"
|
||||
)
|
||||
|
||||
// SanitizeHeaders removes sensitive headers before logging/storage
|
||||
func SanitizeHeaders(headers http.Header) http.Header {
|
||||
sanitized := make(http.Header)
|
||||
|
||||
sensitiveHeaders := []string{
|
||||
"x-api-key",
|
||||
"api-key",
|
||||
"authorization",
|
||||
"anthropic-api-key",
|
||||
"openai-api-key",
|
||||
"bearer",
|
||||
}
|
||||
|
||||
for key, values := range headers {
|
||||
lowerKey := strings.ToLower(key)
|
||||
isSensitive := false
|
||||
|
||||
for _, sensitive := range sensitiveHeaders {
|
||||
if strings.Contains(lowerKey, sensitive) {
|
||||
isSensitive = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if isSensitive {
|
||||
sanitized[key] = []string{"[REDACTED]"}
|
||||
} else {
|
||||
sanitized[key] = values
|
||||
}
|
||||
}
|
||||
|
||||
return sanitized
|
||||
}
|
||||
|
||||
// ConversationDiffAnalyzer analyzes conversation flows to identify new vs repeated content.
// It is stateless: every comparison is driven entirely by the message slices
// passed to AnalyzeConversationFlow.
type ConversationDiffAnalyzer struct{}

// NewConversationDiffAnalyzer creates a new conversation diff analyzer.
func NewConversationDiffAnalyzer() *ConversationDiffAnalyzer {
	return &ConversationDiffAnalyzer{}
}

// ConversationFlowData represents the flow analysis of a conversation.
type ConversationFlowData struct {
	TotalMessages     int                    `json:"totalMessages"`
	NewMessages       []int                  `json:"newMessages"`       // Indices of new messages
	DuplicateMessages []int                  `json:"duplicateMessages"` // Indices of duplicate messages
	MessageHashes     []string               `json:"messageHashes"`     // Content hashes for deduplication
	ConversationHash  string                 `json:"conversationHash"`  // Hash of entire conversation
	PreviousHash      string                 `json:"previousHash"`      // Hash of previous conversation state
	Changes           []ConversationChange   `json:"changes"`           // Detailed changes
	FlowMetadata      map[string]interface{} `json:"flowMetadata"`      // Additional metadata
}

// ConversationChange represents a specific change in the conversation.
type ConversationChange struct {
	Type        string `json:"type"`        // "added", "modified", "context"
	MessageIdx  int    `json:"messageIdx"`  // Index of the message
	Role        string `json:"role"`        // Role of the message
	ContentHash string `json:"contentHash"` // Hash of the content
	Preview     string `json:"preview"`     // Short preview of content
	Timestamp   string `json:"timestamp"`   // When this change was detected (Unix seconds as string)
}
|
||||
|
||||
// AnalyzeConversationFlow analyzes a conversation to identify what's new vs repeated.
//
// Messages are compared position-by-position against previousConversation
// using content hashes: a message is "new" when it lies beyond the previous
// conversation's length, or when its hash differs at the same index. When no
// previous conversation is supplied, a heuristic instead marks the last ~30%
// of messages as new and the rest as context. The result carries per-message
// classifications, per-message hashes, and whole-conversation hashes.
func (c *ConversationDiffAnalyzer) AnalyzeConversationFlow(messages []model.AnthropicMessage, previousConversation []model.AnthropicMessage) *ConversationFlowData {
	totalMessages := len(messages)

	// Create hashes for current conversation
	currentHashes := make([]string, totalMessages)
	for i, msg := range messages {
		currentHashes[i] = c.hashMessage(msg)
	}

	// Create hashes for previous conversation (if any)
	var previousHashes []string
	if previousConversation != nil {
		previousHashes = make([]string, len(previousConversation))
		for i, msg := range previousConversation {
			previousHashes[i] = c.hashMessage(msg)
		}
	}

	// Identify new vs duplicate messages
	newMessages := []int{}
	duplicateMessages := []int{}
	changes := []ConversationChange{}

	// Simple approach: messages that appear after the previous conversation length are new
	previousLength := len(previousHashes)

	for i, msg := range messages {
		isNew := i >= previousLength

		// More sophisticated check: compare hashes
		// (a position whose content changed since last time also counts as new)
		if !isNew && i < len(previousHashes) {
			isNew = currentHashes[i] != previousHashes[i]
		}

		if isNew {
			newMessages = append(newMessages, i)
			changes = append(changes, ConversationChange{
				Type:        "added",
				MessageIdx:  i,
				Role:        msg.Role,
				ContentHash: currentHashes[i],
				Preview:     c.getMessagePreview(msg),
				Timestamp:   fmt.Sprintf("%d", time.Now().Unix()),
			})
		} else {
			duplicateMessages = append(duplicateMessages, i)
			changes = append(changes, ConversationChange{
				Type:        "context",
				MessageIdx:  i,
				Role:        msg.Role,
				ContentHash: currentHashes[i],
				Preview:     c.getMessagePreview(msg),
				Timestamp:   fmt.Sprintf("%d", time.Now().Unix()),
			})
		}
	}

	// If no previous conversation, consider a reasonable threshold of "new" vs "context"
	// NOTE: this deliberately discards the classification computed above
	// (which would have marked every message "added") and rebuilds it.
	if previousConversation == nil && totalMessages > 1 {
		// Heuristic: last 30% of messages are "new", rest is context
		newThreshold := max(1, int(float64(totalMessages)*0.3))
		contextEnd := totalMessages - newThreshold

		newMessages = []int{}
		duplicateMessages = []int{}
		changes = []ConversationChange{}

		for i := 0; i < totalMessages; i++ {
			if i >= contextEnd {
				newMessages = append(newMessages, i)
				changes = append(changes, ConversationChange{
					Type:        "added",
					MessageIdx:  i,
					Role:        messages[i].Role,
					ContentHash: currentHashes[i],
					Preview:     c.getMessagePreview(messages[i]),
					Timestamp:   fmt.Sprintf("%d", time.Now().Unix()),
				})
			} else {
				duplicateMessages = append(duplicateMessages, i)
				changes = append(changes, ConversationChange{
					Type:        "context",
					MessageIdx:  i,
					Role:        messages[i].Role,
					ContentHash: currentHashes[i],
					Preview:     c.getMessagePreview(messages[i]),
					Timestamp:   fmt.Sprintf("%d", time.Now().Unix()),
				})
			}
		}
	}

	// Generate conversation hashes
	conversationHash := c.hashConversation(messages)
	previousHash := ""
	if previousConversation != nil {
		previousHash = c.hashConversation(previousConversation)
	}

	return &ConversationFlowData{
		TotalMessages:     totalMessages,
		NewMessages:       newMessages,
		DuplicateMessages: duplicateMessages,
		MessageHashes:     currentHashes,
		ConversationHash:  conversationHash,
		PreviousHash:      previousHash,
		Changes:           changes,
		FlowMetadata: map[string]interface{}{
			"newCount":       len(newMessages),
			"duplicateCount": len(duplicateMessages),
			"analyzeTime":    time.Now().Format(time.RFC3339),
		},
	}
}
|
||||
|
||||
// hashMessage creates a hash of a message for deduplication
|
||||
func (c *ConversationDiffAnalyzer) hashMessage(msg model.AnthropicMessage) string {
|
||||
// Create a stable representation of the message
|
||||
content := c.normalizeMessageContent(msg.Content)
|
||||
data := fmt.Sprintf("%s|%s", msg.Role, content)
|
||||
|
||||
hash := sha256.Sum256([]byte(data))
|
||||
return fmt.Sprintf("%x", hash[:8]) // Use first 8 bytes for shorter hash
|
||||
}
|
||||
|
||||
// hashConversation creates a hash of the entire conversation
|
||||
func (c *ConversationDiffAnalyzer) hashConversation(messages []model.AnthropicMessage) string {
|
||||
var parts []string
|
||||
for _, msg := range messages {
|
||||
parts = append(parts, c.hashMessage(msg))
|
||||
}
|
||||
|
||||
conversationData := strings.Join(parts, "|")
|
||||
hash := sha256.Sum256([]byte(conversationData))
|
||||
return fmt.Sprintf("%x", hash[:16]) // Use first 16 bytes for conversation hash
|
||||
}
|
||||
|
||||
// normalizeMessageContent converts message content to a normalized string
|
||||
func (c *ConversationDiffAnalyzer) normalizeMessageContent(content interface{}) string {
|
||||
switch v := content.(type) {
|
||||
case string:
|
||||
return strings.TrimSpace(v)
|
||||
case []interface{}:
|
||||
var parts []string
|
||||
for _, item := range v {
|
||||
if block, ok := item.(map[string]interface{}); ok {
|
||||
if text, hasText := block["text"].(string); hasText {
|
||||
parts = append(parts, strings.TrimSpace(text))
|
||||
} else if blockType, hasType := block["type"].(string); hasType {
|
||||
// Handle different content types (tool_use, etc.)
|
||||
switch blockType {
|
||||
case "tool_use":
|
||||
if name, hasName := block["name"].(string); hasName {
|
||||
parts = append(parts, fmt.Sprintf("TOOL:%s", name))
|
||||
}
|
||||
case "tool_result":
|
||||
parts = append(parts, "TOOL_RESULT")
|
||||
default:
|
||||
parts = append(parts, fmt.Sprintf("CONTENT:%s", blockType))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return strings.Join(parts, " ")
|
||||
default:
|
||||
// Convert to JSON and back for normalization
|
||||
jsonBytes, _ := json.Marshal(content)
|
||||
return string(jsonBytes)
|
||||
}
|
||||
}
|
||||
|
||||
// getMessagePreview creates a short preview of a message
|
||||
func (c *ConversationDiffAnalyzer) getMessagePreview(msg model.AnthropicMessage) string {
|
||||
content := c.normalizeMessageContent(msg.Content)
|
||||
if len(content) > 100 {
|
||||
return content[:100] + "..."
|
||||
}
|
||||
return content
|
||||
}
|
||||
|
||||
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
|
||||
99
proxy/internal/middleware/logging.go
Normal file
99
proxy/internal/middleware/logging.go
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
package middleware
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/seifghazi/claude-code-monitor/internal/model"
|
||||
)
|
||||
|
||||
// Logging is HTTP middleware that logs each request's method, path, headers
// (credentials redacted via formatHeaders), and body, then records the
// downstream handler's status code and total handling time.
//
// The request body is fully buffered so it can be logged and then restored
// for the next handler; the raw bytes are also stashed in the request context
// under model.BodyBytesKey so handlers can reuse them without re-reading.
func Logging(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()
		log.Printf("%s - %s %s", start.Format(time.RFC3339), r.Method, r.URL.Path)
		log.Printf("Headers: %s", formatHeaders(r.Header))

		var bodyBytes []byte
		if r.Body != nil {
			var err error
			bodyBytes, err = io.ReadAll(r.Body)
			if err != nil {
				log.Printf("❌ Error reading request body: %v", err)
				http.Error(w, "Error reading request body", http.StatusBadRequest)
				return
			}
			r.Body.Close()
			// Replace the consumed body so downstream handlers can read it again.
			r.Body = io.NopCloser(bytes.NewReader(bodyBytes))
		}

		// Stash the raw body bytes for handlers that want them without re-reading.
		ctx := context.WithValue(r.Context(), model.BodyBytesKey, bodyBytes)
		r = r.WithContext(ctx)

		log.Printf("Body length: %d bytes", len(bodyBytes))
		if len(bodyBytes) > 0 {
			logRequestBody(bodyBytes)
		}
		log.Println("---")

		// Wrap the writer to capture the status code written downstream;
		// default to 200 since handlers may never call WriteHeader explicitly.
		wrapped := &responseWriter{ResponseWriter: w, statusCode: http.StatusOK}
		next.ServeHTTP(wrapped, r)

		duration := time.Since(start)
		log.Printf("Response: %d %s (took %v)", wrapped.statusCode, http.StatusText(wrapped.statusCode), duration)
	})
}
|
||||
|
||||
func formatHeaders(headers http.Header) string {
|
||||
headerMap := make(map[string][]string)
|
||||
for k, v := range headers {
|
||||
headerMap[k] = sanitizeHeaderValue(k, v)
|
||||
}
|
||||
headerBytes, _ := json.MarshalIndent(headerMap, "", " ")
|
||||
return string(headerBytes)
|
||||
}
|
||||
|
||||
// sanitizeHeaderValue returns []string{"[REDACTED]"} when the header name
// looks credential-bearing; otherwise values are returned unchanged.
func sanitizeHeaderValue(key string, values []string) []string {
	loweredKey := strings.ToLower(key)
	for _, marker := range []string{
		"x-api-key",
		"api-key",
		"authorization",
		"anthropic-api-key",
		"openai-api-key",
		"bearer",
	} {
		if strings.Contains(loweredKey, marker) {
			return []string{"[REDACTED]"}
		}
	}
	return values
}
|
||||
|
||||
// logRequestBody pretty-prints the request body when it is valid JSON and
// otherwise logs the raw bytes alongside the parse error.
func logRequestBody(bodyBytes []byte) {
	var parsed interface{}
	err := json.Unmarshal(bodyBytes, &parsed)
	if err != nil {
		log.Printf("❌ Failed to parse body as JSON: %v", err)
		log.Printf("Raw body: %s", string(bodyBytes))
		return
	}
	pretty, _ := json.MarshalIndent(parsed, "", "  ")
	log.Printf("Body: %s", string(pretty))
}
|
||||
|
||||
type responseWriter struct {
|
||||
http.ResponseWriter
|
||||
statusCode int
|
||||
}
|
||||
|
||||
func (rw *responseWriter) WriteHeader(code int) {
|
||||
rw.statusCode = code
|
||||
rw.ResponseWriter.WriteHeader(code)
|
||||
}
|
||||
203
proxy/internal/model/models.go
Normal file
203
proxy/internal/model/models.go
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ContextKey is the type used for values this module stores in a request's
// context, preventing collisions with keys from other packages.
type ContextKey string

// BodyBytesKey is the context key under which the logging middleware stores
// the raw, already-read request body bytes.
const BodyBytesKey ContextKey = "bodyBytes"

// PromptGrade is the result of grading a prompt against best-practice criteria.
type PromptGrade struct {
	Score            int                      `json:"score"`            // overall score
	MaxScore         int                      `json:"maxScore"`         // scale maximum (5)
	Feedback         string                   `json:"feedback"`         // free-form analysis
	ImprovedPrompt   string                   `json:"improvedPrompt"`   // suggested rewrite
	Criteria         map[string]CriteriaScore `json:"criteria"`         // per-criterion breakdown
	GradingTimestamp string                   `json:"gradingTimestamp"` // RFC3339
	IsProcessing     bool                     `json:"isProcessing"`     // true while grading is in flight
}

// CriteriaScore is the score and feedback for a single grading criterion.
type CriteriaScore struct {
	Score    int    `json:"score"`
	Feedback string `json:"feedback"`
}

// RequestLog captures one proxied request (and, once available, its response)
// for storage and display.
type RequestLog struct {
	RequestID   string              `json:"requestId"`
	Timestamp   string              `json:"timestamp"`
	Method      string              `json:"method"`
	Endpoint    string              `json:"endpoint"`
	Headers     map[string][]string `json:"headers"`
	Body        interface{}         `json:"body"`
	Model       string              `json:"model,omitempty"`
	UserAgent   string              `json:"userAgent"`
	ContentType string              `json:"contentType"`
	PromptGrade *PromptGrade        `json:"promptGrade,omitempty"`
	Response    *ResponseLog        `json:"response,omitempty"`
}

// ResponseLog captures the upstream response paired with a RequestLog,
// including streamed chunks when the response was an SSE stream.
type ResponseLog struct {
	StatusCode      int                 `json:"statusCode"`
	Headers         map[string][]string `json:"headers"`
	Body            interface{}         `json:"body,omitempty"`
	BodyText        string              `json:"bodyText,omitempty"`
	ResponseTime    int64               `json:"responseTime"`
	StreamingChunks []string            `json:"streamingChunks,omitempty"`
	IsStreaming     bool                `json:"isStreaming"`
	CompletedAt     string              `json:"completedAt"`
}
|
||||
|
||||
// ChatMessage is a single message in an OpenAI-style chat exchange.
type ChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// ChatCompletionRequest is an OpenAI-compatible chat completion request body.
type ChatCompletionRequest struct {
	Model    string        `json:"model"`
	Messages []ChatMessage `json:"messages"`
	Stream   bool          `json:"stream,omitempty"`
}

// ChatCompletionResponse is an OpenAI-compatible chat completion response body.
type ChatCompletionResponse struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int64    `json:"created"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
	Usage   Usage    `json:"usage"`
}

// Choice is one candidate completion within a ChatCompletionResponse.
type Choice struct {
	Index        int         `json:"index"`
	Message      ChatMessage `json:"message"`
	FinishReason string      `json:"finish_reason"`
}

// Usage reports token accounting for a completion, OpenAI-style.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
|
||||
|
||||
// AnthropicContentBlock is a single typed content block ("text", etc.) within
// an Anthropic message.
type AnthropicContentBlock struct {
	Type string `json:"type"`
	Text string `json:"text"`
}

// AnthropicMessage is a message in Anthropic's Messages API format. Content
// is polymorphic: the API accepts either a plain string or a list of content
// blocks, so it is held as interface{} and normalized via GetContentBlocks.
type AnthropicMessage struct {
	Role    string      `json:"role"`
	Content interface{} `json:"content"`
}
|
||||
|
||||
func (m *AnthropicMessage) GetContentBlocks() []AnthropicContentBlock {
|
||||
switch v := m.Content.(type) {
|
||||
case string:
|
||||
return []AnthropicContentBlock{{Type: "text", Text: v}}
|
||||
case []interface{}:
|
||||
var blocks []AnthropicContentBlock
|
||||
for _, item := range v {
|
||||
if block, ok := item.(map[string]interface{}); ok {
|
||||
if typ, hasType := block["type"].(string); hasType {
|
||||
if text, hasText := block["text"].(string); hasText {
|
||||
blocks = append(blocks, AnthropicContentBlock{Type: typ, Text: text})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return blocks
|
||||
case []AnthropicContentBlock:
|
||||
return v
|
||||
default:
|
||||
return []AnthropicContentBlock{}
|
||||
}
|
||||
}
|
||||
|
||||
// AnthropicSystemMessage is one entry of the structured "system" field in an
// Anthropic Messages API request.
type AnthropicSystemMessage struct {
	Text         string        `json:"text"`
	Type         string        `json:"type"`
	CacheControl *CacheControl `json:"cache_control,omitempty"`
}

// CacheControl marks a system block for Anthropic prompt caching.
type CacheControl struct {
	Type string `json:"type"`
}

// Tool describes a tool the model may call, per Anthropic's tool-use schema.
type Tool struct {
	Name        string      `json:"name"`
	Description string      `json:"description"`
	InputSchema InputSchema `json:"input_schema"`
}

// InputSchema is the JSON-Schema-style description of a tool's input object.
type InputSchema struct {
	Type       string              `json:"type"`
	Properties map[string]Property `json:"properties"`
	Required   []string            `json:"required,omitempty"`
}

// Property is a single field description within an InputSchema.
type Property struct {
	Type        string `json:"type"`
	Description string `json:"description"`
}

// AnthropicRequest is the request body for Anthropic's /v1/messages endpoint.
type AnthropicRequest struct {
	Model       string                   `json:"model"`
	Messages    []AnthropicMessage       `json:"messages"`
	MaxTokens   int                      `json:"max_tokens"`
	Temperature *float64                 `json:"temperature,omitempty"`
	System      []AnthropicSystemMessage `json:"system,omitempty"`
	Stream      bool                     `json:"stream,omitempty"`
	Tools       []Tool                   `json:"tools,omitempty"`
}

// ModelsResponse is an OpenAI-compatible model-listing response.
type ModelsResponse struct {
	Object string      `json:"object"`
	Data   []ModelInfo `json:"data"`
}

// ModelInfo describes one model in a ModelsResponse.
type ModelInfo struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	OwnedBy string `json:"owned_by"`
}
|
||||
|
||||
// GradeRequest is the payload for requesting a prompt grade: the conversation
// messages and system messages to evaluate, plus an optional request ID to
// attach the grade to.
type GradeRequest struct {
	Messages       []AnthropicMessage       `json:"messages"`
	SystemMessages []AnthropicSystemMessage `json:"systemMessages"`
	RequestID      string                   `json:"requestId,omitempty"`
}

// HealthResponse is the body returned by the /health endpoint.
type HealthResponse struct {
	Status    string    `json:"status"`
	Timestamp time.Time `json:"timestamp"`
}

// ErrorResponse is the uniform JSON error body returned by handlers.
type ErrorResponse struct {
	Error   string `json:"error"`
	Details string `json:"details,omitempty"`
}

// StreamingEvent is a decoded server-sent event from an Anthropic streaming
// response (e.g. content_block_start, content_block_delta).
type StreamingEvent struct {
	Type         string        `json:"type"`
	Index        *int          `json:"index,omitempty"`
	Delta        *Delta        `json:"delta,omitempty"`
	ContentBlock *ContentBlock `json:"content_block,omitempty"`
}

// Delta carries the incremental payload of a streaming event; Input is kept
// raw since tool input arrives as partial JSON fragments.
type Delta struct {
	Type  string          `json:"type,omitempty"`
	Text  string          `json:"text,omitempty"`
	Name  string          `json:"name,omitempty"`
	Input json.RawMessage `json:"input,omitempty"`
}

// ContentBlock describes a content block announced at the start of a
// streamed block (text or tool_use).
type ContentBlock struct {
	Type  string          `json:"type"`
	ID    string          `json:"id,omitempty"`
	Name  string          `json:"name,omitempty"`
	Input json.RawMessage `json:"input,omitempty"`
	Text  string          `json:"text,omitempty"`
}
|
||||
291
proxy/internal/service/anthropic.go
Normal file
291
proxy/internal/service/anthropic.go
Normal file
|
|
@ -0,0 +1,291 @@
|
|||
package service
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/seifghazi/claude-code-monitor/internal/config"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/model"
|
||||
)
|
||||
|
||||
// AnthropicService proxies requests to the Anthropic Messages API and can
// grade prompts by asking Claude to evaluate them.
type AnthropicService interface {
	// ForwardRequest sends the request to the configured Anthropic endpoint
	// and returns the raw response; the caller owns closing the body.
	ForwardRequest(ctx context.Context, request *model.AnthropicRequest, apiKey string) (*http.Response, error)
	// GradePrompt asks Claude to evaluate the user prompt extracted from the
	// given messages and returns the parsed grade.
	GradePrompt(ctx context.Context, messages []model.AnthropicMessage, systemMessages []model.AnthropicSystemMessage, apiKey string) (*model.PromptGrade, error)
}

// anthropicService is the HTTP-backed implementation of AnthropicService.
type anthropicService struct {
	client *http.Client            // shared client; reused across requests
	config *config.AnthropicConfig // base URL and API version
}

// NewAnthropicService builds an AnthropicService with a 60-second request
// timeout around the configured endpoint.
func NewAnthropicService(cfg *config.AnthropicConfig) AnthropicService {
	return &anthropicService{
		client: &http.Client{
			Timeout: 60 * time.Second,
		},
		config: cfg,
	}
}
|
||||
|
||||
// ForwardRequest marshals the request and POSTs it to <BaseURL>/v1/messages
// with the given API key and the configured anthropic-version header.
//
// The raw *http.Response is returned unread so callers can stream it; the
// caller is responsible for closing resp.Body. Returns an error when the API
// key is empty, the base URL is missing or malformed, or the HTTP round trip
// fails.
func (s *anthropicService) ForwardRequest(ctx context.Context, request *model.AnthropicRequest, apiKey string) (*http.Response, error) {
	if apiKey == "" {
		return nil, fmt.Errorf("API key not provided")
	}

	requestBody, err := json.Marshal(request)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal request: %w", err)
	}

	if s.config.BaseURL == "" {
		return nil, fmt.Errorf("anthropic base URL is not configured. Please set ANTHROPIC_BASE_URL")
	}

	baseURL, err := url.Parse(s.config.BaseURL)
	if err != nil {
		return nil, fmt.Errorf("failed to parse anthropic base URL '%s': %w", s.config.BaseURL, err)
	}

	if baseURL.Scheme == "" || baseURL.Host == "" {
		return nil, fmt.Errorf("invalid anthropic base URL, scheme and host are required: %s", s.config.BaseURL)
	}

	// path.Join tolerates a base URL that already carries a path prefix.
	baseURL.Path = path.Join(baseURL.Path, "/v1/messages")
	fullURL := baseURL.String()

	req, err := http.NewRequestWithContext(ctx, "POST", fullURL, bytes.NewBuffer(requestBody))
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", apiKey)
	req.Header.Set("anthropic-version", s.config.Version)

	resp, err := s.client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("failed to send request: %w", err)
	}

	return resp, nil
}
|
||||
|
||||
// GradePrompt extracts the user-authored prompt text from messages, builds a
// grading meta-prompt (including the original system prompt for context),
// sends it to Claude via ForwardRequest, and parses the JSON grade out of
// Claude's reply.
//
// Returns an error when no API key is given, no gradeable user content is
// found, the upstream call fails or returns non-200, or the reply cannot be
// decoded/parsed into a grade.
func (s *anthropicService) GradePrompt(ctx context.Context, messages []model.AnthropicMessage, systemMessages []model.AnthropicSystemMessage, apiKey string) (*model.PromptGrade, error) {
	if apiKey == "" {
		return nil, fmt.Errorf("API key not provided")
	}

	userContentParts := s.extractUserContent(messages)
	if len(userContentParts) == 0 {
		return nil, fmt.Errorf("no user content found to grade")
	}

	originalPrompt := strings.Join(userContentParts, "\n\n")
	systemPrompt := s.extractSystemPrompt(systemMessages)

	gradingPrompt := s.buildGradingPrompt(originalPrompt, systemPrompt)

	// The grader itself runs as a plain, non-streaming Claude request.
	claudeRequest := &model.AnthropicRequest{
		Model:     "claude-3-5-sonnet-20240620",
		MaxTokens: 4000,
		Messages: []model.AnthropicMessage{
			{
				Role:    "user",
				Content: gradingPrompt,
			},
		},
	}

	resp, err := s.ForwardRequest(ctx, claudeRequest, apiKey)
	if err != nil {
		return nil, fmt.Errorf("failed to send grading request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		bodyBytes, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(bodyBytes))
	}

	// Decode only the fields we need from the Messages API response shape.
	var claudeResponse struct {
		Content []struct {
			Type string `json:"type"`
			Text string `json:"text"`
		} `json:"content"`
	}

	if err := json.NewDecoder(resp.Body).Decode(&claudeResponse); err != nil {
		return nil, fmt.Errorf("failed to decode response: %w", err)
	}

	if len(claudeResponse.Content) == 0 {
		return nil, fmt.Errorf("empty response from Claude")
	}

	return s.parseGradingResponse(claudeResponse.Content[0].Text)
}
|
||||
|
||||
func (s *anthropicService) extractUserContent(messages []model.AnthropicMessage) []string {
|
||||
var userContentParts []string
|
||||
for _, msg := range messages {
|
||||
if msg.Role == "user" {
|
||||
blocks := msg.GetContentBlocks()
|
||||
for _, block := range blocks {
|
||||
if block.Type == "text" {
|
||||
text := strings.TrimSpace(block.Text)
|
||||
if text != "" && !s.isSystemReminder(text) {
|
||||
userContentParts = append(userContentParts, text)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return userContentParts
|
||||
}
|
||||
|
||||
func (s *anthropicService) extractSystemPrompt(systemMessages []model.AnthropicSystemMessage) string {
|
||||
var systemPromptParts []string
|
||||
for _, msg := range systemMessages {
|
||||
if msg.Text != "" {
|
||||
systemPromptParts = append(systemPromptParts, msg.Text)
|
||||
}
|
||||
}
|
||||
systemPrompt := strings.Join(systemPromptParts, "\n\n")
|
||||
if systemPrompt == "" {
|
||||
systemPrompt = "No system prompt was provided for this request."
|
||||
}
|
||||
return systemPrompt
|
||||
}
|
||||
|
||||
func (s *anthropicService) isSystemReminder(text string) bool {
|
||||
text = strings.TrimSpace(text)
|
||||
lowerText := strings.ToLower(text)
|
||||
|
||||
systemPatterns := []string{
|
||||
"<system-reminder>",
|
||||
"system-reminder>",
|
||||
"this is a reminder that your todo list",
|
||||
"as you answer the user's questions, you can use the following context:",
|
||||
"important-instruction-reminders",
|
||||
"do not mention this to the user explicitly",
|
||||
"the user opened the file",
|
||||
"the user selected the following lines",
|
||||
"caveat: the messages below were generated by the user while running local commands",
|
||||
}
|
||||
|
||||
for _, pattern := range systemPatterns {
|
||||
if strings.Contains(lowerText, strings.ToLower(pattern)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// buildGradingPrompt renders the meta-prompt sent to Claude for grading:
// the user's original prompt and the request's system prompt are spliced
// into a fixed template that asks for a 5-criterion evaluation and an
// improved rewrite, returned in a strict JSON shape that
// parseGradingResponse expects.
func (s *anthropicService) buildGradingPrompt(originalPrompt, systemPrompt string) string {
	return fmt.Sprintf(`<task>
You are an expert prompt engineer specializing in Anthropic's Claude best practices. Please analyze the following user prompt and provide a comprehensive grading report.

<original_prompt>
%s
</original_prompt>

For context, here is the system prompt used in this request:
<system_prompt>
%s
</system_prompt>

Please evaluate this prompt across these 5 criteria and provide your analysis in the exact JSON format specified below:

1. **Clarity & Explicitness** (1-5): How clear and specific are the instructions?
2. **Context & Motivation** (1-5): Does it explain why the task matters and provide sufficient background?
3. **Structure & Format** (1-5): Is it well-organized? Does it use XML tags effectively?
4. **Examples & Details** (1-5): Are there sufficient examples and detailed specifications?
5. **Task-Specific Best Practices** (1-5): Does it follow Claude-specific best practices (thinking prompts, role specification, etc.)?

Additionally, create an improved version of this prompt that addresses any weaknesses you identify. Include XML tags to structure the output if necessary.
</task>

<response_format>
Please respond with a JSON object in exactly this format:
{
  "overallScore": [1-5 integer],
  "detailedFeedback": "[comprehensive analysis of the prompt's strengths and weaknesses]",
  "improvedPrompt": "[your rewritten version of the prompt that addresses the issues]",
  "criteria": {
    "clarity": {
      "score": [1-5 integer],
      "feedback": "[specific feedback for clarity]"
    },
    "context": {
      "score": [1-5 integer],
      "feedback": "[specific feedback for context]"
    },
    "structure": {
      "score": [1-5 integer],
      "feedback": "[specific feedback for structure]"
    },
    "examples": {
      "score": [1-5 integer],
      "feedback": "[specific feedback for examples]"
    },
    "taskSpecific": {
      "score": [1-5 integer],
      "feedback": "[specific feedback for task-specific practices]"
    }
  }
}
</response_format>`, originalPrompt, systemPrompt)
}
|
||||
|
||||
// parseGradingResponse extracts the JSON grading object from Claude's free-
// text reply and converts it to a model.PromptGrade.
//
// Two extraction strategies: if the reply contains a fenced ```json block,
// its contents are used; otherwise the span from the first '{' to the last
// '}' is taken. Errors when no JSON can be located or it fails to unmarshal
// into the expected grading shape.
func (s *anthropicService) parseGradingResponse(responseText string) (*model.PromptGrade, error) {
	var jsonStr string

	if strings.Contains(responseText, "```json") {
		// Skip past the opening fence (len("```json") == 7).
		start := strings.Index(responseText, "```json") + 7
		end := strings.Index(responseText[start:], "```")
		if end != -1 {
			jsonStr = strings.TrimSpace(responseText[start : start+end])
		}
		// An unclosed fence leaves jsonStr empty and is rejected below.
	} else {
		jsonStart := strings.Index(responseText, "{")
		jsonEnd := strings.LastIndex(responseText, "}")
		if jsonStart == -1 || jsonEnd == -1 {
			return nil, fmt.Errorf("no JSON found in Claude's response")
		}
		jsonStr = responseText[jsonStart : jsonEnd+1]
	}

	if jsonStr == "" {
		return nil, fmt.Errorf("no JSON found in Claude's response")
	}

	// Shape mirrors the JSON contract requested in buildGradingPrompt.
	var gradingResult struct {
		OverallScore     int                            `json:"overallScore"`
		DetailedFeedback string                         `json:"detailedFeedback"`
		ImprovedPrompt   string                         `json:"improvedPrompt"`
		Criteria         map[string]model.CriteriaScore `json:"criteria"`
	}

	if err := json.Unmarshal([]byte(jsonStr), &gradingResult); err != nil {
		return nil, fmt.Errorf("failed to parse grading result: %w", err)
	}

	return &model.PromptGrade{
		Score:            gradingResult.OverallScore,
		MaxScore:         5,
		Feedback:         gradingResult.DetailedFeedback,
		ImprovedPrompt:   gradingResult.ImprovedPrompt,
		Criteria:         gradingResult.Criteria,
		GradingTimestamp: time.Now().Format(time.RFC3339),
		IsProcessing:     false,
	}, nil
}
|
||||
306
proxy/internal/service/conversation.go
Normal file
306
proxy/internal/service/conversation.go
Normal file
|
|
@ -0,0 +1,306 @@
|
|||
package service
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ConversationService reads Claude Code conversation transcripts (JSONL
// session files) from the local ~/.claude/projects directory.
type ConversationService interface {
	// GetConversations returns all conversations grouped by project path.
	GetConversations() (map[string][]*Conversation, error)
	// GetConversation loads one session's transcript.
	GetConversation(projectPath, sessionID string) (*Conversation, error)
	// GetConversationsByProject returns all conversations for one project.
	GetConversationsByProject(projectPath string) ([]*Conversation, error)
}

// conversationService is the filesystem-backed implementation.
type conversationService struct {
	claudeProjectsPath string // root directory containing per-project session files
}

// NewConversationService builds a ConversationService rooted at
// ~/.claude/projects.
// NOTE(review): the error from os.UserHomeDir is discarded; on failure the
// root becomes ".claude/projects" relative to the working directory —
// consider surfacing this instead.
func NewConversationService() ConversationService {
	homeDir, _ := os.UserHomeDir()
	return &conversationService{
		claudeProjectsPath: filepath.Join(homeDir, ".claude", "projects"),
	}
}
|
||||
|
||||
// ConversationMessage represents a single message in a Claude conversation,
// mirroring one line of a session's JSONL transcript. Message is kept raw
// because its shape varies by Type.
type ConversationMessage struct {
	ParentUUID  *string         `json:"parentUuid"` // nil for the first message in a chain
	IsSidechain bool            `json:"isSidechain"`
	UserType    string          `json:"userType"`
	CWD         string          `json:"cwd"`
	SessionID   string          `json:"sessionId"`
	Version     string          `json:"version"`
	Type        string          `json:"type"`
	Message     json.RawMessage `json:"message"`
	UUID        string          `json:"uuid"`
	Timestamp   string          `json:"timestamp"`
	ParsedTime  time.Time       `json:"-"` // Timestamp parsed for sorting; not serialized
}

// Conversation represents a complete conversation session parsed from one
// JSONL file.
type Conversation struct {
	SessionID    string                 `json:"sessionId"`
	ProjectPath  string                 `json:"projectPath"`
	ProjectName  string                 `json:"projectName"`
	Messages     []*ConversationMessage `json:"messages"`
	StartTime    time.Time              `json:"startTime"`
	EndTime      time.Time              `json:"endTime"`
	MessageCount int                    `json:"messageCount"`
	FileModTime  time.Time              `json:"-"` // Used for sorting, not exported
}
|
||||
|
||||
// GetConversations returns all conversations organized by project.
//
// It walks claudeProjectsPath for *.jsonl session files, parses each into a
// Conversation, and groups them by project-relative directory. Per-file
// access and parse failures are collected and summarized (first five printed)
// rather than aborting the walk; only a failure of the walk itself is
// returned as an error. Within each project, conversations are sorted by
// file modification time, newest first.
func (cs *conversationService) GetConversations() (map[string][]*Conversation, error) {
	conversations := make(map[string][]*Conversation)
	var parseErrors []string

	err := filepath.Walk(cs.claudeProjectsPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Log but don't fail the entire walk
			parseErrors = append(parseErrors, fmt.Sprintf("Error accessing %s: %v", path, err))
			return nil
		}

		if !strings.HasSuffix(path, ".jsonl") {
			return nil
		}

		// Get the project path relative to claudeProjectsPath
		projectDir := filepath.Dir(path)
		projectRelPath, _ := filepath.Rel(cs.claudeProjectsPath, projectDir)

		// Skip files directly in the projects directory
		if projectRelPath == "." || projectRelPath == "" {
			return nil
		}

		conv, err := cs.parseConversationFile(path, projectRelPath)
		if err != nil {
			// Log parsing errors but continue processing other files
			parseErrors = append(parseErrors, fmt.Sprintf("Failed to parse %s: %v", path, err))
			return nil
		}

		if conv != nil {
			// Include conversations even if they have no messages (edge case)
			conversations[projectRelPath] = append(conversations[projectRelPath], conv)
		}

		return nil
	})

	if err != nil {
		return nil, fmt.Errorf("failed to walk claude projects: %w", err)
	}

	// Log any parsing errors encountered
	if len(parseErrors) > 0 {
		fmt.Printf("Warning: Encountered %d parsing errors while loading conversations:\n", len(parseErrors))
		for i, err := range parseErrors {
			if i < 5 { // Only show first 5 errors to avoid spam
				fmt.Printf("  - %s\n", err)
			}
		}
		if len(parseErrors) > 5 {
			fmt.Printf("  ... and %d more errors\n", len(parseErrors)-5)
		}
	}

	// Sort conversations within each project by file modification time (newest first)
	for project := range conversations {
		sort.Slice(conversations[project], func(i, j int) bool {
			return conversations[project][i].FileModTime.After(conversations[project][j].FileModTime)
		})
	}

	return conversations, nil
}
|
||||
|
||||
// GetConversation returns a specific conversation by project and session ID
|
||||
func (cs *conversationService) GetConversation(projectPath, sessionID string) (*Conversation, error) {
|
||||
filePath := filepath.Join(cs.claudeProjectsPath, projectPath, sessionID+".jsonl")
|
||||
|
||||
conv, err := cs.parseConversationFile(filePath, projectPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse conversation: %w", err)
|
||||
}
|
||||
|
||||
return conv, nil
|
||||
}
|
||||
|
||||
// GetConversationsByProject returns all conversations for a specific project
|
||||
func (cs *conversationService) GetConversationsByProject(projectPath string) ([]*Conversation, error) {
|
||||
var conversations []*Conversation
|
||||
projectDir := filepath.Join(cs.claudeProjectsPath, projectPath)
|
||||
|
||||
files, err := os.ReadDir(projectDir)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read project directory: %w", err)
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
if !strings.HasSuffix(file.Name(), ".jsonl") {
|
||||
continue
|
||||
}
|
||||
|
||||
filePath := filepath.Join(projectDir, file.Name())
|
||||
conv, err := cs.parseConversationFile(filePath, projectPath)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if conv != nil && len(conv.Messages) > 0 {
|
||||
conversations = append(conversations, conv)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by file modification time (newest first)
|
||||
sort.Slice(conversations, func(i, j int) bool {
|
||||
return conversations[i].FileModTime.After(conversations[j].FileModTime)
|
||||
})
|
||||
|
||||
return conversations, nil
|
||||
}
|
||||
|
||||
// parseConversationFile reads and parses a JSONL conversation file
|
||||
func (cs *conversationService) parseConversationFile(filePath, projectPath string) (*Conversation, error) {
|
||||
// Get file info for modification time
|
||||
fileInfo, err := os.Stat(filePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to stat file: %w", err)
|
||||
}
|
||||
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var messages []*ConversationMessage
|
||||
var parseErrors int
|
||||
lineNum := 0
|
||||
|
||||
scanner := bufio.NewScanner(file)
|
||||
|
||||
// Increase buffer size for large messages
|
||||
const maxScanTokenSize = 10 * 1024 * 1024 // 10MB
|
||||
buf := make([]byte, maxScanTokenSize)
|
||||
scanner.Buffer(buf, maxScanTokenSize)
|
||||
|
||||
for scanner.Scan() {
|
||||
lineNum++
|
||||
line := scanner.Bytes()
|
||||
|
||||
// Skip empty lines
|
||||
if len(line) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var msg ConversationMessage
|
||||
if err := json.Unmarshal(line, &msg); err != nil {
|
||||
parseErrors++
|
||||
// Log only first few errors to avoid spam
|
||||
if parseErrors <= 3 {
|
||||
fmt.Printf("Warning: Failed to parse line %d in %s: %v\n", lineNum, filePath, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse timestamp
|
||||
if msg.Timestamp != "" {
|
||||
parsedTime, err := time.Parse(time.RFC3339, msg.Timestamp)
|
||||
if err != nil {
|
||||
// Try alternative timestamp formats
|
||||
parsedTime, err = time.Parse(time.RFC3339Nano, msg.Timestamp)
|
||||
if err != nil {
|
||||
fmt.Printf("Warning: Failed to parse timestamp '%s' in %s\n", msg.Timestamp, filePath)
|
||||
}
|
||||
}
|
||||
msg.ParsedTime = parsedTime
|
||||
}
|
||||
|
||||
messages = append(messages, &msg)
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, fmt.Errorf("scanner error: %w", err)
|
||||
}
|
||||
|
||||
if parseErrors > 3 {
|
||||
fmt.Printf("Warning: Total of %d lines failed to parse in %s\n", parseErrors, filePath)
|
||||
}
|
||||
|
||||
// Return empty conversation if no messages (caller can decide what to do)
|
||||
if len(messages) == 0 {
|
||||
// Extract session ID from filename
|
||||
sessionID := filepath.Base(filePath)
|
||||
sessionID = strings.TrimSuffix(sessionID, ".jsonl")
|
||||
|
||||
// Use the full project path as provided
|
||||
projectName := projectPath
|
||||
// If it looks like a file path, extract the last component
|
||||
if strings.Contains(projectPath, "-") {
|
||||
// This handles cases like "-Users-seifghazi-dev-llm-proxy"
|
||||
projectName = projectPath
|
||||
}
|
||||
|
||||
return &Conversation{
|
||||
SessionID: sessionID,
|
||||
ProjectPath: projectPath,
|
||||
ProjectName: projectName,
|
||||
Messages: messages,
|
||||
StartTime: time.Time{},
|
||||
EndTime: time.Time{},
|
||||
MessageCount: 0,
|
||||
FileModTime: fileInfo.ModTime(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Sort messages by timestamp
|
||||
sort.Slice(messages, func(i, j int) bool {
|
||||
return messages[i].ParsedTime.Before(messages[j].ParsedTime)
|
||||
})
|
||||
|
||||
// Extract session ID from filename
|
||||
sessionID := filepath.Base(filePath)
|
||||
sessionID = strings.TrimSuffix(sessionID, ".jsonl")
|
||||
|
||||
// Use the full project path as provided
|
||||
projectName := projectPath
|
||||
|
||||
// Find first and last valid timestamps
|
||||
var startTime, endTime time.Time
|
||||
for _, msg := range messages {
|
||||
if !msg.ParsedTime.IsZero() {
|
||||
if startTime.IsZero() || msg.ParsedTime.Before(startTime) {
|
||||
startTime = msg.ParsedTime
|
||||
}
|
||||
if endTime.IsZero() || msg.ParsedTime.After(endTime) {
|
||||
endTime = msg.ParsedTime
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no valid timestamps found, use file modification time
|
||||
if startTime.IsZero() {
|
||||
startTime = fileInfo.ModTime()
|
||||
endTime = fileInfo.ModTime()
|
||||
}
|
||||
|
||||
return &Conversation{
|
||||
SessionID: sessionID,
|
||||
ProjectPath: projectPath,
|
||||
ProjectName: projectName,
|
||||
Messages: messages,
|
||||
StartTime: startTime,
|
||||
EndTime: endTime,
|
||||
MessageCount: len(messages),
|
||||
FileModTime: fileInfo.ModTime(),
|
||||
}, nil
|
||||
}
|
||||
18
proxy/internal/service/storage.go
Normal file
18
proxy/internal/service/storage.go
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
package service
|
||||
|
||||
import (
|
||||
"github.com/seifghazi/claude-code-monitor/internal/config"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/model"
|
||||
)
|
||||
|
||||
// StorageService abstracts persistence of proxied request/response logs.
// The SQLite implementation is the current backend; the interface also
// exposes file-oriented hooks (EnsureDirectoryExists) kept for
// compatibility with other backends.
type StorageService interface {
	// SaveRequest persists a request log and returns its ID.
	SaveRequest(request *model.RequestLog) (string, error)
	// GetRequests returns one page of requests (newest first) plus the total count.
	GetRequests(page, limit int) ([]model.RequestLog, int, error)
	// ClearRequests deletes all stored requests and returns how many were removed.
	ClearRequests() (int, error)
	// UpdateRequestWithGrading attaches a prompt grade to an existing request.
	UpdateRequestWithGrading(requestID string, grade *model.PromptGrade) error
	// UpdateRequestWithResponse stores the response captured for a request.
	UpdateRequestWithResponse(request *model.RequestLog) error
	// EnsureDirectoryExists prepares backend storage; may be a no-op.
	EnsureDirectoryExists() error
	// GetRequestByShortID looks up a request by a partial ID and returns
	// the request together with its full ID.
	GetRequestByShortID(shortID string) (*model.RequestLog, string, error)
	// GetConfig exposes the storage configuration the service was built with.
	GetConfig() *config.StorageConfig
	// GetAllRequests returns every request, optionally filtered by model name.
	GetAllRequests(modelFilter string) ([]*model.RequestLog, error)
}
|
||||
386
proxy/internal/service/storage_sqlite.go
Normal file
386
proxy/internal/service/storage_sqlite.go
Normal file
|
|
@ -0,0 +1,386 @@
|
|||
package service
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
|
||||
"github.com/seifghazi/claude-code-monitor/internal/config"
|
||||
"github.com/seifghazi/claude-code-monitor/internal/model"
|
||||
)
|
||||
|
||||
// sqliteStorageService is the SQLite-backed implementation of StorageService.
// All request logs live in a single `requests` table; JSON-valued fields
// (headers, body, grade, response) are stored as serialized TEXT columns.
type sqliteStorageService struct {
	db     *sql.DB               // open database handle (driver: mattn/go-sqlite3)
	config *config.StorageConfig // configuration used to open the database
}
|
||||
|
||||
func NewSQLiteStorageService(cfg *config.StorageConfig) (StorageService, error) {
|
||||
db, err := sql.Open("sqlite3", cfg.DBPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||
}
|
||||
|
||||
service := &sqliteStorageService{
|
||||
db: db,
|
||||
config: cfg,
|
||||
}
|
||||
|
||||
if err := service.createTables(); err != nil {
|
||||
return nil, fmt.Errorf("failed to create tables: %w", err)
|
||||
}
|
||||
|
||||
return service, nil
|
||||
}
|
||||
|
||||
func (s *sqliteStorageService) createTables() error {
|
||||
schema := `
|
||||
CREATE TABLE IF NOT EXISTS requests (
|
||||
id TEXT PRIMARY KEY,
|
||||
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
method TEXT NOT NULL,
|
||||
endpoint TEXT NOT NULL,
|
||||
headers TEXT NOT NULL,
|
||||
body TEXT NOT NULL,
|
||||
user_agent TEXT,
|
||||
content_type TEXT,
|
||||
prompt_grade TEXT,
|
||||
response TEXT,
|
||||
model TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_timestamp ON requests(timestamp DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_endpoint ON requests(endpoint);
|
||||
CREATE INDEX IF NOT EXISTS idx_model ON requests(model);
|
||||
`
|
||||
|
||||
_, err := s.db.Exec(schema)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *sqliteStorageService) SaveRequest(request *model.RequestLog) (string, error) {
|
||||
headersJSON, err := json.Marshal(request.Headers)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to marshal headers: %w", err)
|
||||
}
|
||||
|
||||
bodyJSON, err := json.Marshal(request.Body)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to marshal body: %w", err)
|
||||
}
|
||||
|
||||
// Extract model from body if available
|
||||
var modelName string
|
||||
if body, ok := request.Body.(map[string]interface{}); ok {
|
||||
if model, ok := body["model"].(string); ok {
|
||||
modelName = model
|
||||
request.Model = model // Also set it in the struct
|
||||
}
|
||||
}
|
||||
|
||||
query := `
|
||||
INSERT INTO requests (id, timestamp, method, endpoint, headers, body, user_agent, content_type, model)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`
|
||||
|
||||
_, err = s.db.Exec(query,
|
||||
request.RequestID,
|
||||
request.Timestamp,
|
||||
request.Method,
|
||||
request.Endpoint,
|
||||
string(headersJSON),
|
||||
string(bodyJSON),
|
||||
request.UserAgent,
|
||||
request.ContentType,
|
||||
modelName,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to insert request: %w", err)
|
||||
}
|
||||
|
||||
return request.RequestID, nil
|
||||
}
|
||||
|
||||
func (s *sqliteStorageService) GetRequests(page, limit int) ([]model.RequestLog, int, error) {
|
||||
// Get total count
|
||||
var total int
|
||||
err := s.db.QueryRow("SELECT COUNT(*) FROM requests").Scan(&total)
|
||||
if err != nil {
|
||||
return nil, 0, fmt.Errorf("failed to get total count: %w", err)
|
||||
}
|
||||
|
||||
// Get paginated results
|
||||
offset := (page - 1) * limit
|
||||
query := `
|
||||
SELECT id, timestamp, method, endpoint, headers, body, model, user_agent, content_type, prompt_grade, response
|
||||
FROM requests
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT ? OFFSET ?
|
||||
`
|
||||
|
||||
rows, err := s.db.Query(query, limit, offset)
|
||||
if err != nil {
|
||||
return nil, 0, fmt.Errorf("failed to query requests: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var requests []model.RequestLog
|
||||
for rows.Next() {
|
||||
var req model.RequestLog
|
||||
var headersJSON, bodyJSON string
|
||||
var promptGradeJSON, responseJSON sql.NullString
|
||||
|
||||
err := rows.Scan(
|
||||
&req.RequestID,
|
||||
&req.Timestamp,
|
||||
&req.Method,
|
||||
&req.Endpoint,
|
||||
&headersJSON,
|
||||
&bodyJSON,
|
||||
&req.Model,
|
||||
&req.UserAgent,
|
||||
&req.ContentType,
|
||||
&promptGradeJSON,
|
||||
&responseJSON,
|
||||
)
|
||||
if err != nil {
|
||||
log.Printf("Error scanning row: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Unmarshal JSON fields
|
||||
if err := json.Unmarshal([]byte(headersJSON), &req.Headers); err != nil {
|
||||
log.Printf("Error unmarshaling headers: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
var body interface{}
|
||||
if err := json.Unmarshal([]byte(bodyJSON), &body); err != nil {
|
||||
log.Printf("Error unmarshaling body: %v", err)
|
||||
continue
|
||||
}
|
||||
req.Body = body
|
||||
|
||||
if promptGradeJSON.Valid {
|
||||
var grade model.PromptGrade
|
||||
if err := json.Unmarshal([]byte(promptGradeJSON.String), &grade); err == nil {
|
||||
req.PromptGrade = &grade
|
||||
}
|
||||
}
|
||||
|
||||
if responseJSON.Valid {
|
||||
var resp model.ResponseLog
|
||||
if err := json.Unmarshal([]byte(responseJSON.String), &resp); err == nil {
|
||||
req.Response = &resp
|
||||
}
|
||||
}
|
||||
|
||||
requests = append(requests, req)
|
||||
}
|
||||
|
||||
return requests, total, nil
|
||||
}
|
||||
|
||||
func (s *sqliteStorageService) ClearRequests() (int, error) {
|
||||
result, err := s.db.Exec("DELETE FROM requests")
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to clear requests: %w", err)
|
||||
}
|
||||
|
||||
rowsAffected, err := result.RowsAffected()
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to get rows affected: %w", err)
|
||||
}
|
||||
|
||||
return int(rowsAffected), nil
|
||||
}
|
||||
|
||||
func (s *sqliteStorageService) UpdateRequestWithGrading(requestID string, grade *model.PromptGrade) error {
|
||||
gradeJSON, err := json.Marshal(grade)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal grade: %w", err)
|
||||
}
|
||||
|
||||
query := "UPDATE requests SET prompt_grade = ? WHERE id = ?"
|
||||
_, err = s.db.Exec(query, string(gradeJSON), requestID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update request with grading: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *sqliteStorageService) UpdateRequestWithResponse(request *model.RequestLog) error {
|
||||
responseJSON, err := json.Marshal(request.Response)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal response: %w", err)
|
||||
}
|
||||
|
||||
query := "UPDATE requests SET response = ? WHERE id = ?"
|
||||
_, err = s.db.Exec(query, string(responseJSON), request.RequestID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update request with response: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// EnsureDirectoryExists is a no-op for the SQLite backend: all data lives
// in a single database file, so there is no log directory to create. It
// exists to satisfy the StorageService interface.
func (s *sqliteStorageService) EnsureDirectoryExists() error {
	// No directory needed for SQLite
	return nil
}
|
||||
|
||||
// GetRequestByShortID looks up a single request by a partial ID and
// returns the decoded request along with its full ID.
//
// NOTE(review): the LIKE pattern is "%"+shortID, i.e. it matches IDs that
// END with shortID (a suffix match), not a prefix or substring match —
// confirm against how short IDs are generated; a prefix short-ID scheme
// would need shortID+"%" instead.
func (s *sqliteStorageService) GetRequestByShortID(shortID string) (*model.RequestLog, string, error) {
	query := `
		SELECT id, timestamp, method, endpoint, headers, body, model, user_agent, content_type, prompt_grade, response
		FROM requests
		WHERE id LIKE ?
		ORDER BY timestamp DESC
		LIMIT 1
	`

	var req model.RequestLog
	var headersJSON, bodyJSON string
	// prompt_grade and response columns may be NULL until they are set.
	var promptGradeJSON, responseJSON sql.NullString

	err := s.db.QueryRow(query, "%"+shortID).Scan(
		&req.RequestID,
		&req.Timestamp,
		&req.Method,
		&req.Endpoint,
		&headersJSON,
		&bodyJSON,
		&req.Model,
		&req.UserAgent,
		&req.ContentType,
		&promptGradeJSON,
		&responseJSON,
	)

	if err == sql.ErrNoRows {
		return nil, "", fmt.Errorf("request with ID %s not found", shortID)
	}
	if err != nil {
		return nil, "", fmt.Errorf("failed to query request: %w", err)
	}

	// Unmarshal JSON fields; unlike the list endpoints, decode failures
	// here are returned as errors rather than skipped.
	if err := json.Unmarshal([]byte(headersJSON), &req.Headers); err != nil {
		return nil, "", fmt.Errorf("failed to unmarshal headers: %w", err)
	}

	var body interface{}
	if err := json.Unmarshal([]byte(bodyJSON), &body); err != nil {
		return nil, "", fmt.Errorf("failed to unmarshal body: %w", err)
	}
	req.Body = body

	// Optional fields decode best-effort: a malformed grade/response is dropped.
	if promptGradeJSON.Valid {
		var grade model.PromptGrade
		if err := json.Unmarshal([]byte(promptGradeJSON.String), &grade); err == nil {
			req.PromptGrade = &grade
		}
	}

	if responseJSON.Valid {
		var resp model.ResponseLog
		if err := json.Unmarshal([]byte(responseJSON.String), &resp); err == nil {
			req.Response = &resp
		}
	}

	return &req, req.RequestID, nil
}
|
||||
|
||||
// GetConfig returns the storage configuration this service was created with.
func (s *sqliteStorageService) GetConfig() *config.StorageConfig {
	return s.config
}
|
||||
|
||||
func (s *sqliteStorageService) GetAllRequests(modelFilter string) ([]*model.RequestLog, error) {
|
||||
query := `
|
||||
SELECT id, timestamp, method, endpoint, headers, body, model, user_agent, content_type, prompt_grade, response
|
||||
FROM requests
|
||||
`
|
||||
args := []interface{}{}
|
||||
|
||||
if modelFilter != "" && modelFilter != "all" {
|
||||
query += " WHERE LOWER(model) LIKE ?"
|
||||
args = append(args, "%"+strings.ToLower(modelFilter)+"%")
|
||||
log.Printf("🔍 SQL Query with filter: %s, args: %v", query, args)
|
||||
}
|
||||
|
||||
query += " ORDER BY timestamp DESC"
|
||||
|
||||
rows, err := s.db.Query(query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query requests: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var requests []*model.RequestLog
|
||||
for rows.Next() {
|
||||
var req model.RequestLog
|
||||
var headersJSON, bodyJSON string
|
||||
var promptGradeJSON, responseJSON sql.NullString
|
||||
|
||||
err := rows.Scan(
|
||||
&req.RequestID,
|
||||
&req.Timestamp,
|
||||
&req.Method,
|
||||
&req.Endpoint,
|
||||
&headersJSON,
|
||||
&bodyJSON,
|
||||
&req.Model,
|
||||
&req.UserAgent,
|
||||
&req.ContentType,
|
||||
&promptGradeJSON,
|
||||
&responseJSON,
|
||||
)
|
||||
if err != nil {
|
||||
log.Printf("Error scanning row: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
log.Printf("🔍 Scanned request - ID: %s, Model: %s", req.RequestID, req.Model)
|
||||
|
||||
// Unmarshal JSON fields
|
||||
if err := json.Unmarshal([]byte(headersJSON), &req.Headers); err != nil {
|
||||
log.Printf("Error unmarshaling headers: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
var body interface{}
|
||||
if err := json.Unmarshal([]byte(bodyJSON), &body); err != nil {
|
||||
log.Printf("Error unmarshaling body: %v", err)
|
||||
continue
|
||||
}
|
||||
req.Body = body
|
||||
|
||||
if promptGradeJSON.Valid {
|
||||
var grade model.PromptGrade
|
||||
if err := json.Unmarshal([]byte(promptGradeJSON.String), &grade); err == nil {
|
||||
req.PromptGrade = &grade
|
||||
}
|
||||
}
|
||||
|
||||
if responseJSON.Valid {
|
||||
var resp model.ResponseLog
|
||||
if err := json.Unmarshal([]byte(responseJSON.String), &resp); err == nil {
|
||||
req.Response = &resp
|
||||
}
|
||||
}
|
||||
|
||||
requests = append(requests, &req)
|
||||
}
|
||||
|
||||
return requests, nil
|
||||
}
|
||||
|
||||
// Close releases the underlying database handle.
// NOTE(review): Close is not part of the StorageService interface, so
// callers holding the interface cannot reach it — verify whether shutdown
// code type-asserts to *sqliteStorageService or the handle simply lives
// for the process lifetime.
func (s *sqliteStorageService) Close() error {
	return s.db.Close()
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue