2025-06-29 19:27:00 -04:00
|
|
|
package service
|
|
|
|
|
|
|
|
|
|
import (
|
2025-07-07 02:07:37 -04:00
|
|
|
"compress/gzip"
|
2025-06-29 19:27:00 -04:00
|
|
|
"context"
|
|
|
|
|
"fmt"
|
|
|
|
|
"io"
|
|
|
|
|
"net/http"
|
|
|
|
|
"net/url"
|
|
|
|
|
"path"
|
|
|
|
|
"strings"
|
|
|
|
|
"time"
|
|
|
|
|
|
|
|
|
|
"github.com/seifghazi/claude-code-monitor/internal/config"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
// AnthropicService proxies HTTP requests to the configured Anthropic API
// endpoint on behalf of the monitor.
type AnthropicService interface {
	// ForwardRequest sends originalReq to the configured upstream base URL
	// (re-targeted at /v1/messages) and returns the response. If the upstream
	// reply is gzip-encoded, the returned response carries the decompressed
	// body instead. The original request is cloned, not mutated.
	ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error)
}
|
|
|
|
|
|
|
|
|
|
// anthropicService is the default AnthropicService implementation: a thin
// forwarding proxy built on a shared *http.Client.
type anthropicService struct {
	// client is reused for all upstream calls (connection pooling);
	// its timeout is set in NewAnthropicService.
	client *http.Client
	// config supplies the upstream BaseURL to forward requests to.
	config *config.AnthropicConfig
}
|
|
|
|
|
|
|
|
|
|
func NewAnthropicService(cfg *config.AnthropicConfig) AnthropicService {
|
|
|
|
|
return &anthropicService{
|
|
|
|
|
client: &http.Client{
|
2025-07-07 02:07:37 -04:00
|
|
|
Timeout: 300 * time.Second, // Increased timeout to 5 minutes
|
2025-06-29 19:27:00 -04:00
|
|
|
},
|
|
|
|
|
config: cfg,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
func (s *anthropicService) ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error) {
|
|
|
|
|
// Clone the request to avoid modifying the original
|
|
|
|
|
proxyReq := originalReq.Clone(ctx)
|
2025-06-29 19:27:00 -04:00
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Parse the configured base URL
|
2025-06-29 19:27:00 -04:00
|
|
|
baseURL, err := url.Parse(s.config.BaseURL)
|
|
|
|
|
if err != nil {
|
2025-07-07 02:07:37 -04:00
|
|
|
return nil, fmt.Errorf("failed to parse base URL '%s': %w", s.config.BaseURL, err)
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if baseURL.Scheme == "" || baseURL.Host == "" {
|
2025-07-07 02:07:37 -04:00
|
|
|
return nil, fmt.Errorf("invalid base URL, scheme and host are required: %s", s.config.BaseURL)
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|
|
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Update the destination URL
|
|
|
|
|
proxyReq.URL.Scheme = baseURL.Scheme
|
|
|
|
|
proxyReq.URL.Host = baseURL.Host
|
|
|
|
|
proxyReq.URL.Path = path.Join(baseURL.Path, "/v1/messages")
|
2025-06-29 19:27:00 -04:00
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Preserve query parameters from original request
|
|
|
|
|
proxyReq.URL.RawQuery = originalReq.URL.RawQuery
|
2025-06-29 19:27:00 -04:00
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Clear fields that can't be set in client requests
|
|
|
|
|
proxyReq.RequestURI = "" // This is set by the server and must be cleared
|
|
|
|
|
proxyReq.Host = "" // Let Go set this from the URL
|
2025-06-29 19:27:00 -04:00
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Forward the request with all original headers intact
|
|
|
|
|
resp, err := s.client.Do(proxyReq)
|
2025-06-29 19:27:00 -04:00
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("failed to send request: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Handle gzip decompression
|
|
|
|
|
if strings.Contains(resp.Header.Get("Content-Encoding"), "gzip") {
|
|
|
|
|
decompressedResp, err := s.decompressGzipResponse(resp)
|
|
|
|
|
if err != nil {
|
|
|
|
|
resp.Body.Close()
|
|
|
|
|
return nil, fmt.Errorf("failed to decompress gzip response: %w", err)
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|
2025-07-07 02:07:37 -04:00
|
|
|
return decompressedResp, nil
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|
|
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
return resp, nil
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|
|
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
func (s *anthropicService) decompressGzipResponse(resp *http.Response) (*http.Response, error) {
|
|
|
|
|
// Create a gzip reader
|
|
|
|
|
gzipReader, err := gzip.NewReader(resp.Body)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("failed to create gzip reader: %w", err)
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|
|
|
|
|
|
2025-07-07 02:07:37 -04:00
|
|
|
// Read the decompressed data
|
|
|
|
|
decompressedData, err := io.ReadAll(gzipReader)
|
|
|
|
|
if err != nil {
|
|
|
|
|
gzipReader.Close()
|
|
|
|
|
return nil, fmt.Errorf("failed to read decompressed data: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Close the gzip reader and original body
|
|
|
|
|
gzipReader.Close()
|
|
|
|
|
resp.Body.Close()
|
|
|
|
|
|
|
|
|
|
// Create a new response with decompressed body
|
|
|
|
|
newResp := &http.Response{
|
|
|
|
|
Status: resp.Status,
|
|
|
|
|
StatusCode: resp.StatusCode,
|
|
|
|
|
Proto: resp.Proto,
|
|
|
|
|
ProtoMajor: resp.ProtoMajor,
|
|
|
|
|
ProtoMinor: resp.ProtoMinor,
|
|
|
|
|
Header: resp.Header.Clone(),
|
|
|
|
|
ContentLength: int64(len(decompressedData)),
|
|
|
|
|
TransferEncoding: resp.TransferEncoding,
|
|
|
|
|
Close: resp.Close,
|
|
|
|
|
Uncompressed: true,
|
|
|
|
|
Trailer: resp.Trailer,
|
|
|
|
|
Request: resp.Request,
|
|
|
|
|
TLS: resp.TLS,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Remove Content-Encoding header since we've decompressed
|
|
|
|
|
newResp.Header.Del("Content-Encoding")
|
|
|
|
|
|
|
|
|
|
// Set the decompressed body
|
|
|
|
|
newResp.Body = io.NopCloser(strings.NewReader(string(decompressedData)))
|
|
|
|
|
|
|
|
|
|
return newResp, nil
|
2025-06-29 19:27:00 -04:00
|
|
|
}
|