tenseleyflow/shithub / 4e49e63

Browse files

api/apipage: pagination Link header helper for list endpoints

Authored by mfwolffe <wolffemf@dukes.jmu.edu>
SHA
4e49e63048b7b9f2ffb6cadab1b2b391189c6d11
Parents
1d477a7
Tree
1bff019

2 changed files

StatusFile+-
A internal/web/handlers/api/apipage/page.go 171 0
A internal/web/handlers/api/apipage/page_test.go 256 0
internal/web/handlers/api/apipage/page.goadded
@@ -0,0 +1,171 @@
1
// SPDX-License-Identifier: AGPL-3.0-or-later

// Package apipage centralizes /api/v1 list-endpoint pagination concerns:
// reading page / per_page query params with sensible defaults and clamps,
// and emitting the canonical RFC 8288 Link header for cursor navigation.
//
// The Link header format matches GitHub's REST API verbatim so existing
// gh-style clients (including shithub-cli/internal/api.ParseLinkHeader)
// keep working. All emitted URLs are absolute when a baseURL is provided;
// callers should pass their configured public base URL so links survive
// reverse-proxying and host-rewriting.
package apipage
13
+
14
+import (
15
+	"net/http"
16
+	"net/url"
17
+	"strconv"
18
+	"strings"
19
+)
20
+
21
// DefaultPerPage is the per_page value used when the caller omits it.
// ParseQuery also substitutes it when handed a non-positive default.
const DefaultPerPage = 30

// MaxPerPage caps per_page to prevent unbounded list responses. Mirrors
// GitHub REST's 100 ceiling so client expectations port directly.
// ParseQuery substitutes it when handed a non-positive maximum.
const MaxPerPage = 100
27
+
28
// Page describes a paginated response state. Total >= 0 enables emitting
// first/last in the Link header. Total == -1 disables them and falls
// back to HasMore for forward-only pagination (used when totals are
// expensive to compute and a "next" cursor is cheap).
type Page struct {
	Current int  // 1-indexed; must be >= 1 (LinkHeader clamps lower values up to 1)
	PerPage int  // items per page; must be > 0 or LinkHeader emits nothing
	Total   int  // total items across all pages; -1 when unknown
	HasMore bool // "another page exists" signal; honored only when Total < 0
}
38
+
39
+// ParseQuery reads ?page= and ?per_page= from r.URL.Query() with
40
+// defaults page=1, per_page=defaultPerPage. per_page is clamped to
41
+// [1, maxPerPage]. Non-integer or negative values fall back to defaults
42
+// rather than 400 — matches gh/GitHub leniency for list endpoints.
43
+func ParseQuery(r *http.Request, defaultPerPage, maxPerPage int) (page, perPage int) {
44
+	if defaultPerPage <= 0 {
45
+		defaultPerPage = DefaultPerPage
46
+	}
47
+	if maxPerPage <= 0 {
48
+		maxPerPage = MaxPerPage
49
+	}
50
+	q := r.URL.Query()
51
+	page = atoiOr(q.Get("page"), 1)
52
+	if page < 1 {
53
+		page = 1
54
+	}
55
+	perPage = atoiOr(q.Get("per_page"), defaultPerPage)
56
+	if perPage < 1 {
57
+		perPage = defaultPerPage
58
+	}
59
+	if perPage > maxPerPage {
60
+		perPage = maxPerPage
61
+	}
62
+	return page, perPage
63
+}
64
+
65
+// LinkHeader returns the canonical Link header value for p. The header
66
+// is composed of up to four entries (first, prev, next, last) with
67
+// rel values quoted per RFC 8288.
68
+//
69
+// baseURL is the public scheme://host prefix (e.g. "https://shithub.sh");
70
+// when empty, links are emitted as path-relative URLs. reqURL is the
71
+// incoming request URL — its query string is preserved and only the
72
+// page parameter is rewritten per rel.
73
+//
74
+// Returns "" when there is no useful link to emit (single-page result
75
+// with no forward signal).
76
+func (p Page) LinkHeader(baseURL string, reqURL *url.URL) string {
77
+	if reqURL == nil || p.PerPage <= 0 {
78
+		return ""
79
+	}
80
+	cur := p.Current
81
+	if cur < 1 {
82
+		cur = 1
83
+	}
84
+
85
+	var lastPage int
86
+	knownTotal := p.Total >= 0
87
+	if knownTotal {
88
+		lastPage = lastPageFor(p.Total, p.PerPage)
89
+	}
90
+
91
+	var hasPrev, hasNext bool
92
+	switch {
93
+	case knownTotal:
94
+		hasPrev = cur > 1 && lastPage >= 1
95
+		hasNext = cur < lastPage
96
+	default:
97
+		hasPrev = cur > 1
98
+		hasNext = p.HasMore
99
+	}
100
+
101
+	if !hasPrev && !hasNext && !knownTotal {
102
+		return ""
103
+	}
104
+	if knownTotal && lastPage <= 1 {
105
+		return ""
106
+	}
107
+
108
+	prefix := strings.TrimRight(baseURL, "/")
109
+
110
+	var entries []string
111
+	if knownTotal {
112
+		entries = append(entries, formatLink(prefix, reqURL, 1, "first"))
113
+	}
114
+	if hasPrev {
115
+		entries = append(entries, formatLink(prefix, reqURL, cur-1, "prev"))
116
+	}
117
+	if hasNext {
118
+		entries = append(entries, formatLink(prefix, reqURL, cur+1, "next"))
119
+	}
120
+	if knownTotal && lastPage >= 1 {
121
+		entries = append(entries, formatLink(prefix, reqURL, lastPage, "last"))
122
+	}
123
+	return strings.Join(entries, ", ")
124
+}
125
+
126
// lastPageFor returns the 1-indexed page count for a known total. Always
// >= 1 so callers can render "first" / "last" even on an empty result.
// perPage <= 0 is treated as a single page rather than panicking on a
// zero divisor (LinkHeader guards this today, but the helper must not
// rely on every future caller doing the same).
func lastPageFor(total, perPage int) int {
	if total <= 0 || perPage <= 0 {
		return 1
	}
	// Ceiling division; with total > 0 and perPage > 0 the result is
	// always >= 1, so no further floor is needed.
	pages := total / perPage
	if total%perPage != 0 {
		pages++
	}
	return pages
}
141
+
142
+func formatLink(prefix string, reqURL *url.URL, page int, rel string) string {
143
+	q := reqURL.Query()
144
+	q.Set("page", strconv.Itoa(page))
145
+	rebuilt := *reqURL
146
+	rebuilt.RawQuery = q.Encode()
147
+	rebuilt.Scheme = ""
148
+	rebuilt.Host = ""
149
+
150
+	var b strings.Builder
151
+	b.WriteByte('<')
152
+	if prefix != "" {
153
+		b.WriteString(prefix)
154
+	}
155
+	b.WriteString(rebuilt.RequestURI())
156
+	b.WriteString(`>; rel="`)
157
+	b.WriteString(rel)
158
+	b.WriteByte('"')
159
+	return b.String()
160
+}
161
+
162
// atoiOr parses s as a base-10 integer, returning fallback for the
// empty string or any unparsable input.
func atoiOr(s string, fallback int) int {
	if n, err := strconv.Atoi(s); err == nil {
		return n
	}
	return fallback
}
internal/web/handlers/api/apipage/page_test.goadded
@@ -0,0 +1,256 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+package apipage_test
4
+
5
+import (
6
+	"net/http/httptest"
7
+	"net/url"
8
+	"strings"
9
+	"testing"
10
+
11
+	"github.com/tenseleyFlow/shithub/internal/web/handlers/api/apipage"
12
+)
13
+
14
// TestParseQuery_Defaults verifies that an absent page/per_page query
// yields page=1 and the package default per_page.
func TestParseQuery_Defaults(t *testing.T) {
	t.Parallel()
	r := httptest.NewRequest("GET", "/api/v1/things", nil)
	page, perPage := apipage.ParseQuery(r, 0, 0)
	if page != 1 || perPage != apipage.DefaultPerPage {
		t.Fatalf("got page=%d, perPage=%d; want 1/%d", page, perPage, apipage.DefaultPerPage)
	}
}
22
+
23
// TestParseQuery_ClampsPerPage verifies per_page above the ceiling is
// clamped to maxPerPage while page passes through untouched.
func TestParseQuery_ClampsPerPage(t *testing.T) {
	t.Parallel()
	r := httptest.NewRequest("GET", "/api/v1/things?page=2&per_page=500", nil)
	page, perPage := apipage.ParseQuery(r, 30, 100)
	if page != 2 {
		t.Errorf("page: got %d, want 2", page)
	}
	if perPage != 100 {
		t.Errorf("per_page: got %d, want 100", perPage)
	}
}
34
+
35
// TestParseQuery_NegativeFallback verifies negative page/per_page values
// fall back to page=1 and the package default rather than erroring.
func TestParseQuery_NegativeFallback(t *testing.T) {
	t.Parallel()
	r := httptest.NewRequest("GET", "/api/v1/things?page=-3&per_page=-1", nil)
	page, perPage := apipage.ParseQuery(r, 0, 0)
	if page != 1 || perPage != apipage.DefaultPerPage {
		t.Fatalf("got page=%d, perPage=%d; want 1/%d", page, perPage, apipage.DefaultPerPage)
	}
}
43
+
44
// TestParseQuery_NonInteger verifies unparsable page/per_page values fall
// back to page=1 and the caller-supplied default (leniency, not a 400).
func TestParseQuery_NonInteger(t *testing.T) {
	t.Parallel()
	r := httptest.NewRequest("GET", "/api/v1/things?page=banana&per_page=cherry", nil)
	page, perPage := apipage.ParseQuery(r, 25, 50)
	if page != 1 || perPage != 25 {
		t.Fatalf("got page=%d, perPage=%d; want 1/25", page, perPage)
	}
}
52
+
53
// TestLinkHeader_MiddlePage verifies a middle page (2 of 4, total known)
// emits all four rels with correct page numbers and absolute URLs.
func TestLinkHeader_MiddlePage(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/user/starred?per_page=30&page=2")
	p := apipage.Page{Current: 2, PerPage: 30, Total: 120}
	h := p.LinkHeader("https://shithub.sh", u)

	links := parseLink(h)
	wantRels := []string{"first", "prev", "next", "last"}
	for _, rel := range wantRels {
		if _, ok := links[rel]; !ok {
			t.Errorf("missing rel=%q in %s", rel, h)
		}
	}
	if got := pageFor(links["next"]); got != "3" {
		t.Errorf("next page: got %s, want 3", got)
	}
	if got := pageFor(links["prev"]); got != "1" {
		t.Errorf("prev page: got %s, want 1", got)
	}
	if got := pageFor(links["last"]); got != "4" {
		t.Errorf("last page: got %s, want 4", got)
	}
	if got := pageFor(links["first"]); got != "1" {
		t.Errorf("first page: got %s, want 1", got)
	}
	if !strings.HasPrefix(links["next"], "https://shithub.sh/api/v1/user/starred?") {
		t.Errorf("next link not absolute: %s", links["next"])
	}
}
82
+
83
// TestLinkHeader_FirstPage verifies page 1 of a known total omits prev
// but still emits next and a correct last.
func TestLinkHeader_FirstPage(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/repos?per_page=10&page=1")
	p := apipage.Page{Current: 1, PerPage: 10, Total: 25}
	links := parseLink(p.LinkHeader("https://shithub.sh", u))
	if _, ok := links["prev"]; ok {
		t.Errorf("prev should be absent on first page; got %v", links)
	}
	if _, ok := links["next"]; !ok {
		t.Error("next should be present on first page")
	}
	if got := pageFor(links["last"]); got != "3" {
		t.Errorf("last page: got %s, want 3", got)
	}
}
98
+
99
// TestLinkHeader_LastPage verifies the final page of a known total omits
// next but still emits prev.
func TestLinkHeader_LastPage(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/repos?per_page=10&page=3")
	p := apipage.Page{Current: 3, PerPage: 10, Total: 25}
	links := parseLink(p.LinkHeader("https://shithub.sh", u))
	if _, ok := links["next"]; ok {
		t.Errorf("next should be absent on last page; got %v", links)
	}
	if _, ok := links["prev"]; !ok {
		t.Error("prev should be present on last page")
	}
}
111
+
112
// TestLinkHeader_SinglePage verifies a known total that fits on one page
// suppresses the Link header entirely.
func TestLinkHeader_SinglePage(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/repos?per_page=30&page=1")
	p := apipage.Page{Current: 1, PerPage: 30, Total: 5}
	if got := p.LinkHeader("https://shithub.sh", u); got != "" {
		t.Errorf("expected empty header for single-page result; got %q", got)
	}
}
120
+
121
// TestLinkHeader_StreamForm verifies forward-only mode (Total == -1):
// first/last are suppressed while next (HasMore) and prev (Current > 1)
// are emitted.
func TestLinkHeader_StreamForm(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/feed?page=2")
	p := apipage.Page{Current: 2, PerPage: 30, Total: -1, HasMore: true}
	links := parseLink(p.LinkHeader("https://shithub.sh", u))
	if _, ok := links["first"]; ok {
		t.Error("first should not appear when total unknown")
	}
	if _, ok := links["last"]; ok {
		t.Error("last should not appear when total unknown")
	}
	if _, ok := links["next"]; !ok {
		t.Error("next should appear when HasMore=true")
	}
	if _, ok := links["prev"]; !ok {
		t.Error("prev should appear when Current > 1")
	}
}
139
+
140
// TestLinkHeader_StreamFormExhausted verifies forward-only mode with
// HasMore=false drops next but keeps prev for Current > 1.
func TestLinkHeader_StreamFormExhausted(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/feed?page=4")
	p := apipage.Page{Current: 4, PerPage: 30, Total: -1, HasMore: false}
	links := parseLink(p.LinkHeader("https://shithub.sh", u))
	if _, ok := links["next"]; ok {
		t.Error("next should be absent when HasMore=false")
	}
	if _, ok := links["prev"]; !ok {
		t.Error("prev should still appear when Current > 1")
	}
}
152
+
153
// TestLinkHeader_PreservesOtherQueryParams verifies that non-pagination
// query parameters (filters, labels) survive into every emitted link.
func TestLinkHeader_PreservesOtherQueryParams(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/issues?state=open&labels=bug,ux&per_page=30&page=2")
	p := apipage.Page{Current: 2, PerPage: 30, Total: 120}
	links := parseLink(p.LinkHeader("https://shithub.sh", u))
	for rel, link := range links {
		if !strings.Contains(link, "state=open") {
			t.Errorf("rel=%s lost state=open: %s", rel, link)
		}
		if !strings.Contains(link, "labels=") {
			t.Errorf("rel=%s lost labels: %s", rel, link)
		}
	}
}
167
+
168
// TestLinkHeader_RelativeWhenBaseURLEmpty verifies an empty baseURL
// produces path-relative links rather than absolute ones.
func TestLinkHeader_RelativeWhenBaseURLEmpty(t *testing.T) {
	t.Parallel()
	u, _ := url.Parse("/api/v1/repos?page=1&per_page=10")
	p := apipage.Page{Current: 1, PerPage: 10, Total: 25}
	h := p.LinkHeader("", u)
	if !strings.HasPrefix(h, "</api/v1/repos?") {
		t.Errorf("expected path-relative link; got %s", h)
	}
}
177
+
178
+// parseLink reimplements the gh-compatible parser used by
179
+// shithub-cli/internal/api.ParseLinkHeader. Kept in-test so apipage has
180
+// no cross-module dependency, but we exercise the same algorithm here.
181
+func parseLink(header string) map[string]string {
182
+	out := map[string]string{}
183
+	if header == "" {
184
+		return out
185
+	}
186
+	for _, entry := range splitLinkEntries(header) {
187
+		u, rel, ok := parseLinkEntry(entry)
188
+		if !ok {
189
+			continue
190
+		}
191
+		out[rel] = u
192
+	}
193
+	return out
194
+}
195
+
196
// splitLinkEntries splits a Link header value on top-level commas,
// ignoring commas that appear inside <...> URL brackets.
func splitLinkEntries(header string) []string {
	var out []string
	var cur strings.Builder
	depth := 0
	for _, c := range header {
		if c == '<' {
			depth++
		} else if c == '>' && depth > 0 {
			depth--
		} else if c == ',' && depth == 0 {
			// Entry boundary: flush without keeping the comma itself.
			out = append(out, strings.TrimSpace(cur.String()))
			cur.Reset()
			continue
		}
		cur.WriteRune(c)
	}
	if cur.Len() > 0 {
		out = append(out, strings.TrimSpace(cur.String()))
	}
	return out
}
224
+
225
// parseLinkEntry extracts the <URL> and rel="..." attribute from one
// Link header entry. ok is false when either piece is missing.
func parseLinkEntry(entry string) (linkURL, rel string, ok bool) {
	start := strings.Index(entry, "<")
	end := strings.Index(entry, ">")
	if start < 0 || end < start {
		return "", "", false
	}
	linkURL = entry[start+1 : end]
	// Scan the attributes after the URL; the last rel= wins, matching
	// the original parser.
	for _, attr := range strings.Split(entry[end+1:], ";") {
		trimmed := strings.TrimSpace(attr)
		if strings.HasPrefix(trimmed, "rel=") {
			rel = strings.Trim(trimmed[len("rel="):], `"`)
		}
	}
	if rel == "" {
		return "", "", false
	}
	return linkURL, rel, true
}
245
+
246
// pageFor extracts the "page" query parameter from a link URL, or ""
// when the URL has no query string or it fails to parse.
func pageFor(link string) string {
	_, rawQuery, found := strings.Cut(link, "?")
	if !found {
		return ""
	}
	vals, err := url.ParseQuery(rawQuery)
	if err != nil {
		return ""
	}
	return vals.Get("page")
}