feat(cmd): pagination unification + fj api --paginate

Before this, only `release list` walked pages. `repo list`, `pr list` (the
non-filter branch), and `issue list` all passed `PageSize: limit` directly
to the gitea SDK — which silently caps PageSize at 50, so any request for
more than 50 results was truncated to 50 with no warning. `--limit` was
effectively a per-page hint, not a real limit.
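
For illustration, the old shape of those calls (an assumed sketch using the
gitea SDK's ListUserRepos; the exact call sites vary per command):

    // Old (broken) shape: one SDK call with PageSize set straight from
    // --limit. The SDK clamps PageSize to 50, so --limit 200 quietly
    // returned at most 50 rows.
    repos, _, err := client.ListUserRepos(user, gitea.ListUserReposOptions{
        ListOptions: gitea.ListOptions{PageSize: limit},
    })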

## Changes

- New `cmd/paginate.go` — generic `paginateGitea[T any]` that walks pages
  until a page comes back short or the limit is reached. Uses Go generics
  (available since Go 1.18) so each list command keeps its existing typed
  slice without conversion overhead; see the sketch after this list.

- `repo list` — paginates ListUserRepos.
- `pr list` — paginates ListRepoPullRequests in both branches:
  - With client-side filters (assignee, author, labels, search, draft,
    head, base): pull all pages then filter+limit.
  - Without filters: paginate up to limit.
- `issue list` — paginates ListRepoIssues. Fetches up to 2x `--limit`
  because the API returns both issues AND PRs and we filter the PRs out
  client-side; the overshoot keeps the fetch bounded while reducing the
  chance of returning fewer results than `--limit`.
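
A minimal sketch of the helper's shape (signature assumed; the real
`cmd/paginate.go` may differ, and `fetch` stands in for whatever SDK list
call each command wraps):

    // paginateGitea walks pages until a short page or until limit items
    // have been collected. pageSize stays constant across pages so the
    // server-side offsets line up; the final page is trimmed to the limit.
    func paginateGitea[T any](limit int, fetch func(page, pageSize int) ([]T, error)) ([]T, error) {
        const sdkMaxPageSize = 50 // the gitea SDK silently clamps PageSize here
        pageSize := limit
        if pageSize > sdkMaxPageSize {
            pageSize = sdkMaxPageSize
        }
        var out []T
        for page := 1; len(out) < limit; page++ {
            items, err := fetch(page, pageSize)
            if err != nil {
                return nil, err
            }
            out = append(out, items...)
            if len(items) < pageSize { // short page: no more results upstream
                break
            }
        }
        if len(out) > limit {
            out = out[:limit]
        }
        return out, nil
    }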

## `fj api --paginate`

Mirrors `gh api --paginate`:
- Follows RFC 5988 `Link: rel="next"` headers (Forgejo emits these on
  list endpoints).
- Concatenates each page's JSON array into a single array via
  `concatPaginatedJSON`. If a page is not a JSON array, errors with a
  clear message — `--paginate` only makes sense for paginatable endpoints.
- GET-only (errors on POST/PUT/DELETE).
- Reuses the same auth and custom headers across pages; the body-size
  limit applies per-page.
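
For example (hypothetical host and paths), a list response's Link header
looks like the value below, and `parseLinkHeaderNext` (shown in the diff
further down) reduces it to the next page's URL; the last page omits
rel="next", which ends the loop:

    // Hypothetical Link header value from a Forgejo list endpoint.
    link := `<https://example.org/api/v1/repos/o/r/issues?page=2>; rel="next", ` +
        `<https://example.org/api/v1/repos/o/r/issues?page=7>; rel="last"`
    next := parseLinkHeaderNext(link)
    // next == "https://example.org/api/v1/repos/o/r/issues?page=2"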

Refactored the request execution into a `doOnce` closure so the loop body
isn't a copy of the single-request path.

Verified live:

  $ fj api 'repos/public/claude-code-proxy/commits?limit=2' \
        --paginate --jq '. | length'
  44

(44 = total commits in the repo, walked via Link headers from a 2-per-page
starting query.)

Out of scope for this commit, deferred:
- De-duplicating cmd/aliases.go ↔ cmd/actions.go subtrees (the type
  mismatch they caused is already fixed in the prior commit; the
  duplication itself is polish).
commit 133fb2fea4 (parent 0c181df1d1)
Author: sid
Date: 2026-05-02 15:46:22 -06:00
5 changed files with 199 additions and 66 deletions


@@ -64,7 +64,40 @@ func init() {
 	apiCmd.Flags().StringArrayP("header", "H", nil, "Add an HTTP request header (key:value)")
 	apiCmd.Flags().Bool("silent", false, "Do not print the response body")
 	apiCmd.Flags().BoolP("include", "i", false, "Include HTTP response headers in the output")
-	addJSONFlags(apiCmd, "Output the response as JSON; pass a comma-separated field list to project specific keys")
+	apiCmd.Flags().Bool("paginate", false, "Follow rel=\"next\" Link headers and concatenate JSON array pages (gh-compatible)")
+	addJSONFlags(apiCmd, "Output the response as JSON")
 }
+
+// parseLinkHeaderNext extracts the URL with rel="next" from an RFC 5988
+// Link header. Returns "" if not present.
+func parseLinkHeaderNext(link string) string {
+	for _, segment := range strings.Split(link, ",") {
+		segment = strings.TrimSpace(segment)
+		if !strings.Contains(segment, `rel="next"`) {
+			continue
+		}
+		start := strings.Index(segment, "<")
+		end := strings.Index(segment, ">")
+		if start >= 0 && end > start {
+			return segment[start+1 : end]
+		}
+	}
+	return ""
+}
+
+// concatPaginatedJSON parses each body as a JSON array and merges them.
+// Errors if any body isn't an array (e.g. an object response means the
+// endpoint isn't paginated and --paginate doesn't apply).
+func concatPaginatedJSON(bodies [][]byte) ([]byte, error) {
+	merged := make([]json.RawMessage, 0)
+	for i, b := range bodies {
+		var page []json.RawMessage
+		if err := json.Unmarshal(b, &page); err != nil {
+			return nil, fmt.Errorf("--paginate requires JSON array responses; page %d wasn't an array: %w", i+1, err)
+		}
+		merged = append(merged, page...)
+	}
+	return json.Marshal(merged)
+}
+
 func runAPI(cmd *cobra.Command, args []string) error {
@@ -198,21 +231,42 @@ func runAPI(cmd *cobra.Command, args []string) error {
 		req.Header.Set(strings.TrimSpace(key), strings.TrimSpace(value))
 	}
 
-	// Execute request via the shared client (30 s timeout, pooled
-	// connections). Previous zero-value http.Client{} had no timeout, which
-	// pinned the CLI on a hung Forgejo indefinitely.
-	ios.StartSpinner("Requesting...")
-	resp, err := api.SharedHTTPClient.Do(req)
-	ios.StopSpinner()
-	if err != nil {
-		return fmt.Errorf("failed to perform request: %w", err)
+	paginate, _ := cmd.Flags().GetBool("paginate")
+	if paginate && method != http.MethodGet {
+		return fmt.Errorf("--paginate only supports GET requests")
+	}
+
+	// doOnce executes a single request via the shared client (30 s timeout,
+	// pooled connections), reads the body bounded by maxAPIResponseBytes,
+	// and closes the body before returning. Previous zero-value http.Client{}
+	// had no timeout, pinning the CLI on a hung Forgejo indefinitely.
+	doOnce := func(r *http.Request) (body []byte, header http.Header, status int, proto string, statusText string, retErr error) {
+		ios.StartSpinner("Requesting...")
+		resp, err := api.SharedHTTPClient.Do(r)
+		ios.StopSpinner()
+		if err != nil {
+			return nil, nil, 0, "", "", fmt.Errorf("failed to perform request: %w", err)
+		}
+		defer func() { _ = resp.Body.Close() }()
+		body, err = io.ReadAll(io.LimitReader(resp.Body, maxAPIResponseBytes+1))
+		if err != nil {
+			return nil, nil, 0, "", "", fmt.Errorf("failed to read response body: %w", err)
+		}
+		if int64(len(body)) > maxAPIResponseBytes {
+			return nil, nil, 0, "", "", fmt.Errorf("response body exceeded %d bytes (use a different tool for bulk transfers)", maxAPIResponseBytes)
+		}
+		return body, resp.Header, resp.StatusCode, resp.Proto, resp.Status, nil
+	}
+
+	respBody, respHeader, statusCode, proto, status, err := doOnce(req)
+	if err != nil {
+		return err
 	}
-	defer func() { _ = resp.Body.Close() }()
 
 	// Print response headers if requested
 	if include {
-		fmt.Fprintf(ios.Out, "%s %s\n", resp.Proto, resp.Status)
-		for key, values := range resp.Header {
+		fmt.Fprintf(ios.Out, "%s %s\n", proto, status)
+		for key, values := range respHeader {
 			for _, v := range values {
 				fmt.Fprintf(ios.Out, "%s: %s\n", key, v)
 			}
@@ -220,32 +274,60 @@ func runAPI(cmd *cobra.Command, args []string) error {
 		fmt.Fprintln(ios.Out)
 	}
 
-	// Read response body with a hard ceiling so a runaway upstream can't OOM
-	// the CLI. Read maxAPIResponseBytes+1 to detect overflow.
-	respBody, err := io.ReadAll(io.LimitReader(resp.Body, maxAPIResponseBytes+1))
-	if err != nil {
-		return fmt.Errorf("failed to read response body: %w", err)
-	}
-	if int64(len(respBody)) > maxAPIResponseBytes {
-		return fmt.Errorf("response body exceeded %d bytes (use a different tool for bulk transfers)", maxAPIResponseBytes)
-	}
-
 	// Handle non-2xx status codes
-	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
+	if statusCode < 200 || statusCode >= 300 {
 		if !silent {
 			fmt.Fprint(ios.ErrOut, string(respBody))
 			if len(respBody) > 0 && respBody[len(respBody)-1] != '\n' {
 				fmt.Fprintln(ios.ErrOut)
 			}
 		}
-		return fmt.Errorf("API request failed with status %d", resp.StatusCode)
+		return fmt.Errorf("API request failed with status %d", statusCode)
 	}
 
+	// Follow `Link: rel="next"` headers when --paginate is set, accumulating
+	// each page's body. After the loop, concatPaginatedJSON merges them into
+	// a single JSON array. Endpoint must be paginatable (returns an array).
+	if paginate {
+		bodies := [][]byte{respBody}
+		nextURL := parseLinkHeaderNext(respHeader.Get("Link"))
+		for nextURL != "" {
+			nextReq, err := http.NewRequest(http.MethodGet, nextURL, nil)
+			if err != nil {
+				return fmt.Errorf("failed to build paginated request: %w", err)
+			}
+			if host.Token != "" {
+				nextReq.Header.Set("Authorization", "token "+host.Token)
+			}
+			nextReq.Header.Set("Accept", "application/json")
+			for _, h := range headers {
+				key, value, found := strings.Cut(h, ":")
+				if !found {
+					continue
+				}
+				nextReq.Header.Set(strings.TrimSpace(key), strings.TrimSpace(value))
+			}
+			pageBody, pageHeader, pageStatus, _, _, err := doOnce(nextReq)
+			if err != nil {
+				return err
+			}
+			if pageStatus < 200 || pageStatus >= 300 {
+				return fmt.Errorf("paginated request to %s failed with status %d", nextURL, pageStatus)
+			}
+			bodies = append(bodies, pageBody)
+			nextURL = parseLinkHeaderNext(pageHeader.Get("Link"))
+		}
+		merged, err := concatPaginatedJSON(bodies)
+		if err != nil {
+			return err
+		}
+		respBody = merged
+	}
+
 	if silent || len(respBody) == 0 {
 		return nil
 	}
-	contentType := resp.Header.Get("Content-Type")
+	contentType := respHeader.Get("Content-Type")
 	isJSON := strings.Contains(contentType, "json") || json.Valid(respBody)
 
 	// If the user asked for JSON projection or jq filtering, route through