tenseleyflow/shithub / a943129

Browse files

S22: pulls orchestrator (create/sync/mergeability/merge/state) + tests

Authored by mfwolffe <wolffemf@dukes.jmu.edu>
SHA
a943129c730b7fc50b3097bb05ed3e63b1d4fed3
Parents
5d8e910
Tree
a68b3cf

6 changed files

StatusFile+-
A internal/pulls/linked_issues.go 39 0
A internal/pulls/linked_issues_test.go 34 0
A internal/pulls/merge.go 244 0
A internal/pulls/pulls.go 377 0
A internal/pulls/pulls_test.go 366 0
A internal/pulls/state.go 47 0
internal/pulls/linked_issues.goadded
@@ -0,0 +1,39 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+package pulls
4
+
5
+import (
6
+	"regexp"
7
+	"strconv"
8
+)
9
+
10
// reLinkedIssue matches the GitHub-style auto-close keywords:
//
//	close|closes|closed | fix|fixes|fixed | resolve|resolves|resolved
//
// followed by `#N`. Case-insensitive. The keyword and the `#N` token
// must be joined only by whitespace, so plain "...closes the door
// on #5..." doesn't accidentally trigger an auto-close.
var reLinkedIssue = regexp.MustCompile(`(?i)\b(?:close[sd]?|fix(?:e[sd])?|resolve[sd]?)\s+#([0-9]{1,9})\b`)

// parseLinkedIssues returns the deduplicated set of issue numbers a
// PR body or commit message intends to auto-close, in order of first
// appearance. An empty body yields nil.
func parseLinkedIssues(body string) []int64 {
	if body == "" {
		return nil
	}
	known := make(map[int64]struct{})
	numbers := []int64{}
	for _, match := range reLinkedIssue.FindAllStringSubmatch(body, -1) {
		id, parseErr := strconv.ParseInt(match[1], 10, 64)
		if parseErr != nil {
			// The capture group is at most 9 digits, so this is
			// effectively unreachable — kept as a guard.
			continue
		}
		if _, dup := known[id]; dup {
			continue
		}
		known[id] = struct{}{}
		numbers = append(numbers, id)
	}
	return numbers
}
internal/pulls/linked_issues_test.goadded
@@ -0,0 +1,34 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+package pulls
4
+
5
+import (
6
+	"reflect"
7
+	"testing"
8
+)
9
+
10
+func TestParseLinkedIssues(t *testing.T) {
11
+	t.Parallel()
12
+	cases := []struct {
13
+		name string
14
+		body string
15
+		want []int64
16
+	}{
17
+		{"closes", "closes #1", []int64{1}},
18
+		{"fixes_capitalized", "Fixes #42", []int64{42}},
19
+		{"resolves_past_tense", "Resolved #7", []int64{7}},
20
+		{"multiple", "Closes #1, fixes #2, resolves #3", []int64{1, 2, 3}},
21
+		{"dedup", "Fixes #5\nFixes #5 again", []int64{5}},
22
+		{"plain_hash_does_not_match", "see #99 — does not auto-close", nil},
23
+		{"keyword_in_prose_no_hash", "this closes the door on bug fixes", nil},
24
+		{"fixed_with_extra_whitespace", "Fixed  #11", []int64{11}},
25
+	}
26
+	for _, c := range cases {
27
+		t.Run(c.name, func(t *testing.T) {
28
+			got := parseLinkedIssues(c.body)
29
+			if !reflect.DeepEqual(got, c.want) && !(len(got) == 0 && len(c.want) == 0) {
30
+				t.Errorf("body %q: got %v, want %v", c.body, got, c.want)
31
+			}
32
+		})
33
+	}
34
+}
internal/pulls/merge.goadded
@@ -0,0 +1,244 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+package pulls
4
+
5
+import (
6
+	"context"
7
+	"errors"
8
+	"fmt"
9
+	"strconv"
10
+	"strings"
11
+	"time"
12
+
13
+	"github.com/jackc/pgx/v5/pgtype"
14
+
15
+	"github.com/tenseleyFlow/shithub/internal/issues"
16
+	issuesdb "github.com/tenseleyFlow/shithub/internal/issues/sqlc"
17
+	pullsdb "github.com/tenseleyFlow/shithub/internal/pulls/sqlc"
18
+	repogit "github.com/tenseleyFlow/shithub/internal/repos/git"
19
+	usersdb "github.com/tenseleyFlow/shithub/internal/users/sqlc"
20
+)
21
+
22
// MergeParams describes a merge request.
type MergeParams struct {
	PRID         int64            // PR's issue ID (PRs share the issues row's primary key)
	ActorUserID  int64            // user performing the merge; 0 is treated as "no actor"
	GitDir       string           // on-disk bare repo path, resolved by the caller
	Method       string           // "merge" | "squash" | "rebase"
	Subject      string           // optional override; falls back to "<title> (#<number>)"
	Body         string           // optional extra commit-message body
	WorktreesDir string           // optional, defaults to <gitDir>/.tmp-worktrees
	Now          func() time.Time // clock override for tests; nil falls back to time.Now
}
33
+
34
// Merge performs the requested merge inside a temp worktree:
//
//  1. Lock the pull_requests row (FOR UPDATE) to serialize concurrent
//     attempts. The transaction holds for the whole job to keep the
//     lock until the DB state reflects the merge.
//  2. Validate state: not merged, not closed, mergeable_state == clean,
//     method allowed by the repo config.
//  3. Resolve identity (author = PR author for squash, merger otherwise;
//     committer = merger).
//  4. Run repogit.PerformMerge against the bare repo.
//  5. Persist merge_commit_sha + merged_at + flip the issue to closed
//     with state_reason='completed'.
//  6. Auto-close linked issues parsed from PR body + commit messages.
//  7. Emit a `merged` timeline event.
//
// NOTE(review): step 2's "method allowed by the repo config" check is
// not performed in this function — presumably the handler calls
// AllowedMethod and maps a false result to ErrMergeMethodOff; confirm.
func Merge(ctx context.Context, deps Deps, p MergeParams) error {
	if p.Now == nil {
		p.Now = time.Now
	}

	// Begin the locking tx. Holding the row lock through the whole
	// merge prevents two attempts both winning at the worktree level.
	tx, err := deps.Pool.Begin(ctx)
	if err != nil {
		return err
	}
	committed := false
	defer func() {
		if !committed {
			_ = tx.Rollback(ctx)
		}
	}()

	q := pullsdb.New()
	pr, err := q.LockPullRequestForMerge(ctx, tx, p.PRID)
	if err != nil {
		// NOTE(review): every lookup error collapses to ErrPRNotFound
		// here, so a transient DB failure is indistinguishable from a
		// missing row; consider mapping only pgx.ErrNoRows.
		return ErrPRNotFound
	}
	if pr.MergedAt.Valid {
		return ErrAlreadyMerged
	}
	// Issue side: must still be open + mergeable.
	iq := issuesdb.New()
	issue, err := iq.GetIssueByID(ctx, tx, p.PRID)
	if err != nil {
		return err
	}
	if issue.State != issuesdb.IssueStateOpen {
		return ErrAlreadyClosed
	}
	if pr.MergeableState != pullsdb.PrMergeableStateClean {
		return ErrMergeBlocked
	}

	// Identity for the new commit. Author email rules per spec:
	//   merge       — author = committer = merger
	//   squash      — author = PR author; committer = merger
	//   rebase      — author preserved; committer = merger (handled by
	//                 repogit.PerformMerge via env)
	uq := usersdb.New()
	mergerName, mergerEmail, err := identityFor(ctx, tx, uq, p.ActorUserID)
	if err != nil {
		return err
	}
	authorName, authorEmail := mergerName, mergerEmail
	if p.Method == "squash" && issue.AuthorUserID.Valid {
		// Best-effort: fall back to merger identity if the PR author's
		// identity can't be resolved.
		an, ae, err := identityFor(ctx, tx, uq, issue.AuthorUserID.Int64)
		if err == nil {
			authorName, authorEmail = an, ae
		}
	}

	subject := strings.TrimSpace(p.Subject)
	if subject == "" {
		subject = issue.Title + " (#" + strconv.FormatInt(issue.Number, 10) + ")"
	}

	// Run the merge against the bare repo. PerformMerge cleans up the
	// worktree on every exit path.
	mergeRes, err := repogit.PerformMerge(ctx, repogit.MergeOptions{
		GitDir:         p.GitDir,
		BaseRef:        "refs/heads/" + pr.BaseRef,
		BaseOID:        pr.BaseOid,
		HeadOID:        pr.HeadOid,
		Method:         p.Method,
		AuthorName:     authorName,
		AuthorEmail:    authorEmail,
		CommitterName:  mergerName,
		CommitterEmail: mergerEmail,
		When:           p.Now(),
		Subject:        subject,
		Body:           p.Body,
		WorktreesDir:   p.WorktreesDir,
	})
	if err != nil {
		return fmt.Errorf("perform merge: %w", err)
	}

	// Persist the merge result against the still-locked row.
	if err := q.SetPullRequestMerged(ctx, tx, pullsdb.SetPullRequestMergedParams{
		IssueID:        p.PRID,
		MergedByUserID: pgtype.Int8{Int64: p.ActorUserID, Valid: p.ActorUserID != 0},
		MergeCommitSha: pgtype.Text{String: mergeRes.MergedOID, Valid: true},
		MergeMethod:    pullsdb.NullPrMergeMethod{PrMergeMethod: pullsdb.PrMergeMethod(p.Method), Valid: true},
		BaseOidAtMerge: pgtype.Text{String: pr.BaseOid, Valid: true},
		HeadOidAtMerge: pgtype.Text{String: pr.HeadOid, Valid: true},
	}); err != nil {
		return err
	}

	// Close the issue side with state_reason=completed.
	if err := iq.SetIssueState(ctx, tx, issuesdb.SetIssueStateParams{
		ID:             p.PRID,
		State:          issuesdb.IssueStateClosed,
		StateReason:    issuesdb.NullIssueStateReason{IssueStateReason: issuesdb.IssueStateReasonCompleted, Valid: true},
		ClosedByUserID: pgtype.Int8{Int64: p.ActorUserID, Valid: p.ActorUserID != 0},
	}); err != nil {
		return err
	}

	// `merged` timeline event.
	if _, err := iq.InsertIssueEvent(ctx, tx, issuesdb.InsertIssueEventParams{
		IssueID:     p.PRID,
		ActorUserID: pgtype.Int8{Int64: p.ActorUserID, Valid: p.ActorUserID != 0},
		Kind:        "merged",
		Meta:        []byte(fmt.Sprintf(`{"method":%q,"commit":%q}`, p.Method, mergeRes.MergedOID)),
	}); err != nil {
		return err
	}

	// Auto-close linked issues. Linked = Closes/Fixes/Resolves #N in
	// the PR body + each commit message. Best-effort; don't fail the
	// merge if the close fails (errors below are deliberately ignored).
	linked := parseLinkedIssues(issue.Body)
	for _, c := range fetchCommitsForLinkScan(ctx, tx, p.PRID) {
		linked = append(linked, parseLinkedIssues(c)...)
	}
	closed := map[int64]bool{}
	for _, num := range linked {
		if num == issue.Number {
			continue // self-reference
		}
		// Only close same-repo, open, real issues (never other PRs).
		target, err := iq.GetIssueByNumber(ctx, tx, issuesdb.GetIssueByNumberParams{
			RepoID: issue.RepoID, Number: num,
		})
		if err != nil || target.Kind != issuesdb.IssueKindIssue || target.State != issuesdb.IssueStateOpen {
			continue
		}
		if closed[target.ID] {
			continue
		}
		closed[target.ID] = true
		_ = iq.SetIssueState(ctx, tx, issuesdb.SetIssueStateParams{
			ID:             target.ID,
			State:          issuesdb.IssueStateClosed,
			StateReason:    issuesdb.NullIssueStateReason{IssueStateReason: issuesdb.IssueStateReasonCompleted, Valid: true},
			ClosedByUserID: pgtype.Int8{Int64: p.ActorUserID, Valid: p.ActorUserID != 0},
		})
		_, _ = iq.InsertIssueEvent(ctx, tx, issuesdb.InsertIssueEventParams{
			IssueID:     target.ID,
			ActorUserID: pgtype.Int8{Int64: p.ActorUserID, Valid: p.ActorUserID != 0},
			Kind:        "closed",
			Meta:        []byte(fmt.Sprintf(`{"closed_by_pr":%d}`, p.PRID)),
		})
	}

	if err := tx.Commit(ctx); err != nil {
		return err
	}
	committed = true
	return nil
}
204
+
205
+// identityFor reads the user's display + primary verified email for
206
+// commit identity. Falls back to a synthesised noreply on missing data
207
+// rather than failing the merge — privacy/noreply emails ship post-MVP.
208
+func identityFor(ctx context.Context, db pullsdb.DBTX, uq *usersdb.Queries, userID int64) (name, email string, err error) {
209
+	user, err := uq.GetUserByID(ctx, db, userID)
210
+	if err != nil {
211
+		return "", "", err
212
+	}
213
+	display := strings.TrimSpace(user.DisplayName)
214
+	if display == "" {
215
+		display = user.Username
216
+	}
217
+	addr := user.Username + "@noreply.shithub.local"
218
+	if user.PrimaryEmailID.Valid {
219
+		em, err := uq.GetUserEmailByID(ctx, db, user.PrimaryEmailID.Int64)
220
+		if err == nil && em.Verified {
221
+			addr = string(em.Email)
222
+		}
223
+	}
224
+	return display, addr, nil
225
+}
226
+
227
+// fetchCommitsForLinkScan returns the head-side commit messages so the
228
+// linked-issue parser can scan body + subject. Best-effort — returns
229
+// an empty slice on error.
230
+func fetchCommitsForLinkScan(ctx context.Context, db pullsdb.DBTX, prID int64) []string {
231
+	rows, err := pullsdb.New().ListPullRequestCommits(ctx, db, prID)
232
+	if err != nil {
233
+		return nil
234
+	}
235
+	out := make([]string, 0, len(rows))
236
+	for _, r := range rows {
237
+		out = append(out, r.Subject+"\n\n"+r.Body)
238
+	}
239
+	return out
240
+}
241
+
242
// Keep the issues package's typed errors referenced from this file so
// EditPR's wrapping continues to work after refactors. (Strictly a
// package-init expression rather than a compile-time assertion, but a
// removed or renamed sentinel still fails the build here.)
var _ = errors.Is(issues.ErrEmptyTitle, issues.ErrEmptyTitle)
internal/pulls/pulls.goadded
@@ -0,0 +1,377 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+// Package pulls owns the pull-request orchestrator. PRs reuse the S21
4
+// `issues` row for title/body/state/timeline; this package owns the
5
+// PR-specific surface — opening, synchronizing, mergeability detection,
6
+// merge execution.
7
+//
8
+// Entry points are:
9
+//
10
+//	Create     — opens a PR (creates the issue row + the pull_requests row)
11
+//	Synchronize — refreshes commit + file lists + emits a synchronized event
12
+//	Mergeability — recomputes mergeable / mergeable_state via merge-tree
13
+//	Merge      — performs the requested merge strategy in a temp worktree
14
+//	Edit       — title/body
15
+//	SetState   — close / reopen
16
+//	SetReady   — draft → ready
17
+package pulls
18
+
19
+import (
20
+	"context"
21
+	"errors"
22
+	"fmt"
23
+	"log/slog"
24
+	"strings"
25
+
26
+	"github.com/jackc/pgx/v5"
27
+	"github.com/jackc/pgx/v5/pgtype"
28
+	"github.com/jackc/pgx/v5/pgxpool"
29
+
30
+	"github.com/tenseleyFlow/shithub/internal/issues"
31
+	issuesdb "github.com/tenseleyFlow/shithub/internal/issues/sqlc"
32
+	pullsdb "github.com/tenseleyFlow/shithub/internal/pulls/sqlc"
33
+	repogit "github.com/tenseleyFlow/shithub/internal/repos/git"
34
+	mdrender "github.com/tenseleyFlow/shithub/internal/repos/markdown"
35
+	reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc"
36
+)
37
+
38
// Deps wires this package against the rest of the runtime.
type Deps struct {
	Pool   *pgxpool.Pool // database handle; also used to open transactions
	Logger *slog.Logger  // optional; callers nil-check before logging
}
43
+
44
// Errors surfaced to handlers.
var (
	ErrSameBranch       = errors.New("pulls: base and head must differ")
	ErrBaseNotFound     = errors.New("pulls: base ref not found")
	ErrHeadNotFound     = errors.New("pulls: head ref not found")
	ErrNoCommitsToMerge = errors.New("pulls: head has no commits ahead of base")
	ErrAlreadyMerged    = errors.New("pulls: already merged")
	ErrAlreadyClosed    = errors.New("pulls: already closed")
	ErrMergeBlocked     = errors.New("pulls: merge blocked (mergeable_state != clean)")
	// NOTE(review): ErrMergeMethodOff and ErrConcurrentMerge are not
	// returned by any function visible in this package's source —
	// presumably the handler layer maps AllowedMethod==false and
	// row-lock contention onto them; confirm before removing.
	ErrMergeMethodOff  = errors.New("pulls: requested merge method is disabled on this repo")
	ErrConcurrentMerge = errors.New("pulls: PR is being merged by another request")
	ErrPRNotFound      = errors.New("pulls: PR not found")
)
57
+
58
// CreateParams describes a new-PR request.
type CreateParams struct {
	RepoID       int64  // repo the PR targets; also recorded as the head repo
	AuthorUserID int64  // author recorded on the backing issues row
	Title        string // PR title; validated by issues.Create
	Body         string // markdown body; rendered by issues.Create
	BaseRef      string // target branch name (bare, without refs/heads/)
	HeadRef      string // source branch name (bare, without refs/heads/)
	Draft        bool   // open as a draft PR
	GitDir       string // resolved from RepoFS by the caller
}

// CreateResult bundles the issue row + the PR row (post-snapshot).
type CreateResult struct {
	Issue       issuesdb.Issue      // backing issues row (title/body/state/number)
	PullRequest pullsdb.PullRequest // PR row with the base/head OID snapshot
}
75
+
76
// Create opens a PR. Validates that base/head are distinct and resolve
// in the on-disk repo, snapshots their OIDs, then creates the issues
// row + pull_requests row in one tx. Mergeability is `unknown` until
// the worker job ticks.
//
// NOTE(review): despite the "one tx" wording above, the issues row is
// created inside issues.Create and the pull_requests row in a separate
// statement against the pool — a failure in CreatePullRequest leaves an
// orphaned issue row of kind "pr". Consider threading a shared tx.
func Create(ctx context.Context, deps Deps, p CreateParams) (CreateResult, error) {
	base := strings.TrimSpace(p.BaseRef)
	head := strings.TrimSpace(p.HeadRef)
	if base == "" || head == "" || base == head {
		return CreateResult{}, ErrSameBranch
	}
	baseOID, err := repogit.ResolveRefOID(ctx, p.GitDir, base)
	if err != nil {
		if errors.Is(err, repogit.ErrRefNotFound) {
			return CreateResult{}, ErrBaseNotFound
		}
		return CreateResult{}, fmt.Errorf("resolve base: %w", err)
	}
	headOID, err := repogit.ResolveRefOID(ctx, p.GitDir, head)
	if err != nil {
		if errors.Is(err, repogit.ErrRefNotFound) {
			return CreateResult{}, ErrHeadNotFound
		}
		return CreateResult{}, fmt.Errorf("resolve head: %w", err)
	}
	// Identical OIDs means there is nothing to merge yet.
	if baseOID == headOID {
		return CreateResult{}, ErrNoCommitsToMerge
	}

	// Open the issues row first via the issues orchestrator so we get
	// the per-repo number allocation, body markdown render, and
	// reference indexing for free.
	issueRow, err := issues.Create(ctx, issues.Deps{Pool: deps.Pool, Logger: deps.Logger}, issues.CreateParams{
		RepoID:       p.RepoID,
		AuthorUserID: p.AuthorUserID,
		Title:        p.Title,
		Body:         p.Body,
		Kind:         "pr",
	})
	if err != nil {
		return CreateResult{}, err
	}

	prRow, err := pullsdb.New().CreatePullRequest(ctx, deps.Pool, pullsdb.CreatePullRequestParams{
		IssueID:    issueRow.ID,
		BaseRef:    base,
		HeadRef:    head,
		HeadRepoID: p.RepoID,
		BaseOid:    baseOID,
		HeadOid:    headOID,
		Draft:      p.Draft,
	})
	if err != nil {
		return CreateResult{}, fmt.Errorf("create pull_request: %w", err)
	}

	// Best-effort initial synchronize so the PR view has commits + files
	// even before the worker queue runs. Failures here don't fail the
	// open — the worker will retry on the next tick.
	if err := refreshCommitsAndFiles(ctx, deps, p.GitDir, prRow.IssueID, baseOID, headOID); err != nil {
		if deps.Logger != nil {
			deps.Logger.WarnContext(ctx, "pulls: initial sync", "error", err, "pr_id", prRow.IssueID)
		}
	}

	return CreateResult{Issue: issueRow, PullRequest: prRow}, nil
}
142
+
143
// refreshCommitsAndFiles is shared by Create + Synchronize. Truncates +
// re-fills `pull_request_commits` and `pull_request_files` and stamps
// the PR row with the base/head OID snapshot, all in one transaction so
// readers never see a half-refreshed list.
//
// Note: the commit list is capped at 250 entries, so very large PRs
// store a truncated `pull_request_commits` set.
func refreshCommitsAndFiles(ctx context.Context, deps Deps, gitDir string, prID int64, baseOID, headOID string) error {
	commits, err := repogit.CommitsBetweenDetail(ctx, gitDir, baseOID, headOID, 250)
	if err != nil {
		return fmt.Errorf("commits: %w", err)
	}
	files, err := repogit.FilesChangedBetween(ctx, gitDir, baseOID, headOID)
	if err != nil {
		return fmt.Errorf("files: %w", err)
	}
	tx, err := deps.Pool.Begin(ctx)
	if err != nil {
		return err
	}
	committed := false
	defer func() {
		if !committed {
			_ = tx.Rollback(ctx)
		}
	}()
	q := pullsdb.New()
	// Clear-then-refill keeps the tables an exact mirror of the range.
	if err := q.ClearPullRequestCommits(ctx, tx, prID); err != nil {
		return err
	}
	if err := q.ClearPullRequestFiles(ctx, tx, prID); err != nil {
		return err
	}
	for i, c := range commits {
		// Zero timestamps are stored as SQL NULL rather than the epoch.
		var ats, cts pgtype.Timestamptz
		if !c.AuthorWhen.IsZero() {
			ats = pgtype.Timestamptz{Time: c.AuthorWhen, Valid: true}
		}
		if !c.CommitterWhen.IsZero() {
			cts = pgtype.Timestamptz{Time: c.CommitterWhen, Valid: true}
		}
		if err := q.InsertPullRequestCommit(ctx, tx, pullsdb.InsertPullRequestCommitParams{
			PrID:           prID,
			Sha:            c.OID,
			Position:       int32(i),
			AuthorName:     c.AuthorName,
			AuthorEmail:    c.AuthorEmail,
			CommitterName:  c.CommitterName,
			CommitterEmail: c.CommitterEmail,
			Subject:        c.Subject,
			Body:           c.Body,
			AuthoredAt:     ats,
			CommittedAt:    cts,
		}); err != nil {
			return err
		}
	}
	for _, f := range files {
		// OldPath is only present for renames/copies; empty means NULL.
		oldPath := pgtype.Text{}
		if f.OldPath != "" {
			oldPath = pgtype.Text{String: f.OldPath, Valid: true}
		}
		if err := q.InsertPullRequestFile(ctx, tx, pullsdb.InsertPullRequestFileParams{
			PrID:      prID,
			Path:      f.Path,
			Status:    pullsdb.PrFileStatus(f.Status),
			OldPath:   oldPath,
			Additions: int32(f.Additions),
			Deletions: int32(f.Deletions),
			Changes:   int32(f.Additions + f.Deletions),
		}); err != nil {
			return err
		}
	}
	if err := q.SetPullRequestSnapshot(ctx, tx, pullsdb.SetPullRequestSnapshotParams{
		IssueID: prID, BaseOid: baseOID, HeadOid: headOID,
	}); err != nil {
		return err
	}
	if err := tx.Commit(ctx); err != nil {
		return err
	}
	committed = true
	return nil
}
223
+
224
+// Synchronize re-snapshots the PR's base/head OIDs, refreshes the
225
+// commits + files lists, and emits a `synchronized` event into the
226
+// issue timeline. Called from the pr:synchronize worker job after
227
+// any push to the head ref.
228
+func Synchronize(ctx context.Context, deps Deps, gitDir string, prID int64) error {
229
+	q := pullsdb.New()
230
+	pr, err := q.GetPullRequestByIssueID(ctx, deps.Pool, prID)
231
+	if err != nil {
232
+		if errors.Is(err, pgx.ErrNoRows) {
233
+			return ErrPRNotFound
234
+		}
235
+		return err
236
+	}
237
+	baseOID, err := repogit.ResolveRefOID(ctx, gitDir, pr.BaseRef)
238
+	if err != nil {
239
+		return fmt.Errorf("resolve base: %w", err)
240
+	}
241
+	headOID, err := repogit.ResolveRefOID(ctx, gitDir, pr.HeadRef)
242
+	if err != nil {
243
+		return fmt.Errorf("resolve head: %w", err)
244
+	}
245
+	if err := refreshCommitsAndFiles(ctx, deps, gitDir, prID, baseOID, headOID); err != nil {
246
+		return err
247
+	}
248
+	// Reset mergeability to unknown so the next mergeability tick
249
+	// recomputes against the fresh snapshot.
250
+	if err := q.SetPullRequestMergeability(ctx, deps.Pool, pullsdb.SetPullRequestMergeabilityParams{
251
+		IssueID:        prID,
252
+		Mergeable:      pgtype.Bool{},
253
+		MergeableState: pullsdb.PrMergeableStateUnknown,
254
+	}); err != nil {
255
+		return fmt.Errorf("reset mergeability: %w", err)
256
+	}
257
+	// Emit the synchronized timeline event.
258
+	iq := issuesdb.New()
259
+	if _, err := iq.InsertIssueEvent(ctx, deps.Pool, issuesdb.InsertIssueEventParams{
260
+		IssueID: prID,
261
+		Kind:    "synchronized",
262
+		Meta:    []byte(fmt.Sprintf(`{"head_oid":%q}`, headOID)),
263
+	}); err != nil {
264
+		return fmt.Errorf("emit event: %w", err)
265
+	}
266
+	return nil
267
+}
268
+
269
+// Mergeability runs the merge-tree probe and persists the result.
270
+func Mergeability(ctx context.Context, deps Deps, gitDir string, prID int64) error {
271
+	q := pullsdb.New()
272
+	pr, err := q.GetPullRequestByIssueID(ctx, deps.Pool, prID)
273
+	if err != nil {
274
+		return err
275
+	}
276
+	if pr.BaseOid == "" || pr.HeadOid == "" {
277
+		return nil // synchronize hasn't run yet; nothing to probe
278
+	}
279
+	// Behind: head has no commits ahead of base.
280
+	gitDirCtx := ctx
281
+	commits, err := repogit.CommitsBetweenDetail(gitDirCtx, gitDir, pr.BaseOid, pr.HeadOid, 1)
282
+	if err != nil && !errors.Is(err, repogit.ErrRefNotFound) {
283
+		return err
284
+	}
285
+	if len(commits) == 0 {
286
+		return q.SetPullRequestMergeability(ctx, deps.Pool, pullsdb.SetPullRequestMergeabilityParams{
287
+			IssueID:        prID,
288
+			Mergeable:      pgtype.Bool{Bool: false, Valid: true},
289
+			MergeableState: pullsdb.PrMergeableStateBehind,
290
+		})
291
+	}
292
+	res, err := repogit.ProbeMerge(gitDirCtx, gitDir, pr.BaseOid, pr.HeadOid)
293
+	if err != nil {
294
+		return fmt.Errorf("probe: %w", err)
295
+	}
296
+	state := pullsdb.PrMergeableStateClean
297
+	mergeable := true
298
+	if res.HasConflict {
299
+		state = pullsdb.PrMergeableStateDirty
300
+		mergeable = false
301
+	}
302
+	return q.SetPullRequestMergeability(ctx, deps.Pool, pullsdb.SetPullRequestMergeabilityParams{
303
+		IssueID:        prID,
304
+		Mergeable:      pgtype.Bool{Bool: mergeable, Valid: true},
305
+		MergeableState: state,
306
+	})
307
+}
308
+
309
// EditPR updates the PR's title + body. Body markdown is re-rendered
// via the same pipeline issues.Create uses so HTML is consistent.
//
// Validation: non-empty title, title ≤ 256 and body ≤ 65535. Both
// limits are byte counts (len on a string), not rune counts —
// presumably matching the issues orchestrator's own checks; confirm.
func EditPR(ctx context.Context, deps Deps, prID int64, title, body string) error {
	title = strings.TrimSpace(title)
	if title == "" {
		return issues.ErrEmptyTitle
	}
	if len(title) > 256 {
		return issues.ErrTitleTooLong
	}
	if len(body) > 65535 {
		return issues.ErrBodyTooLong
	}
	// Render error deliberately ignored: an empty html string is stored
	// as SQL NULL (Valid: false below), so the edit still succeeds —
	// presumably readers fall back to raw markdown; confirm.
	html, _ := mdrender.RenderHTML([]byte(body))
	q := issuesdb.New()
	return q.UpdateIssueTitleBody(ctx, deps.Pool, issuesdb.UpdateIssueTitleBodyParams{
		ID:             prID,
		Title:          title,
		Body:           body,
		BodyHtmlCached: pgtype.Text{String: html, Valid: html != ""},
	})
}
331
+
332
+// SetReady flips draft → false and emits a `ready_for_review` event.
333
+func SetReady(ctx context.Context, deps Deps, actorUserID, prID int64) error {
334
+	q := pullsdb.New()
335
+	tx, err := deps.Pool.Begin(ctx)
336
+	if err != nil {
337
+		return err
338
+	}
339
+	committed := false
340
+	defer func() {
341
+		if !committed {
342
+			_ = tx.Rollback(ctx)
343
+		}
344
+	}()
345
+	if err := q.SetPullRequestDraft(ctx, tx, pullsdb.SetPullRequestDraftParams{IssueID: prID, Draft: false}); err != nil {
346
+		return err
347
+	}
348
+	iq := issuesdb.New()
349
+	if _, err := iq.InsertIssueEvent(ctx, tx, issuesdb.InsertIssueEventParams{
350
+		IssueID:     prID,
351
+		ActorUserID: pgtype.Int8{Int64: actorUserID, Valid: actorUserID != 0},
352
+		Kind:        "ready_for_review",
353
+		Meta:        []byte("{}"),
354
+	}); err != nil {
355
+		return err
356
+	}
357
+	if err := tx.Commit(ctx); err != nil {
358
+		return err
359
+	}
360
+	committed = true
361
+	return nil
362
+}
363
+
364
+// AllowedMethod returns true when the repo allows the named merge
365
+// strategy. Falls open for unknown methods so callers get a clear
366
+// error from the orchestrator.
367
+func AllowedMethod(repo reposdb.Repo, method string) bool {
368
+	switch method {
369
+	case "merge":
370
+		return repo.AllowMergeCommit
371
+	case "squash":
372
+		return repo.AllowSquashMerge
373
+	case "rebase":
374
+		return repo.AllowRebaseMerge
375
+	}
376
+	return false
377
+}
internal/pulls/pulls_test.goadded
@@ -0,0 +1,366 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+package pulls_test
4
+
5
+import (
6
+	"context"
7
+	"io"
8
+	"log/slog"
9
+	"os"
10
+	"os/exec"
11
+	"path/filepath"
12
+	"strings"
13
+	"sync"
14
+	"testing"
15
+
16
+	"github.com/jackc/pgx/v5/pgtype"
17
+	"github.com/jackc/pgx/v5/pgxpool"
18
+
19
+	issuesdb "github.com/tenseleyFlow/shithub/internal/issues/sqlc"
20
+	"github.com/tenseleyFlow/shithub/internal/pulls"
21
+	pullsdb "github.com/tenseleyFlow/shithub/internal/pulls/sqlc"
22
+	reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc"
23
+	"github.com/tenseleyFlow/shithub/internal/testing/dbtest"
24
+	usersdb "github.com/tenseleyFlow/shithub/internal/users/sqlc"
25
+)
26
+
27
// fixtureHash is a syntactically well-formed argon2id string used as
// a placeholder password hash for fixture users — presumably it only
// needs to satisfy CreateUser's hash-format expectations, so no real
// KDF work runs in tests (NOTE(review): confirm no login path ever
// verifies against it).
const fixtureHash = "$argon2id$v=19$m=16384,t=1,p=1$" +
	"AAAAAAAAAAAAAAAA$" +
	"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
30
+
31
// gitCmd builds a git invocation. The nolint annotation suppresses
// gosec G204 (subprocess with variable arguments): every call site
// only passes paths under a t.TempDir the test itself created.
func gitCmd(args ...string) *exec.Cmd {
	//nolint:gosec
	cmd := exec.Command("git", args...)
	return cmd
}
37
+
38
// fixture spins a real bare git repo on disk + a DB row pair (user
// + repo + ensured issue counter) so the orchestrator's path-on-disk
// assumptions hold.
type fixture struct {
	pool   *pgxpool.Pool // per-test database pool from dbtest
	deps   pulls.Deps    // orchestrator dependencies (pool + logger)
	userID int64         // ID of the fixture user "alice"
	repoID int64         // ID of the fixture repo "demo"
	gitDir string        // absolute path to the bare repo (demo.git)
}
48
+
49
// setup provisions everything a pulls test needs: a fresh test DB
// holding a user ("alice", with a verified primary email), a public
// repo ("demo", default branch "trunk") with its issue counter
// ensured, plus a real bare git repository under t.TempDir.
func setup(t *testing.T) fixture {
	t.Helper()
	pool := dbtest.NewTestDB(t)
	ctx := context.Background()

	// User + email: the email is linked back as primary so merge
	// commits can carry a real author identity.
	uq := usersdb.New()
	user, err := uq.CreateUser(ctx, pool, usersdb.CreateUserParams{
		Username: "alice", DisplayName: "Alice", PasswordHash: fixtureHash,
	})
	if err != nil {
		t.Fatalf("CreateUser: %v", err)
	}
	em, err := uq.CreateUserEmail(ctx, pool, usersdb.CreateUserEmailParams{
		UserID: user.ID, Email: "alice@example.com", IsPrimary: true, Verified: true,
	})
	if err != nil {
		t.Fatalf("CreateUserEmail: %v", err)
	}
	if err := uq.LinkUserPrimaryEmail(ctx, pool, usersdb.LinkUserPrimaryEmailParams{
		ID: user.ID, PrimaryEmailID: pgtype.Int8{Int64: em.ID, Valid: true},
	}); err != nil {
		t.Fatalf("LinkUserPrimaryEmail: %v", err)
	}

	// Repo row; default branch "trunk" matches the bare repo below.
	rq := reposdb.New()
	repo, err := rq.CreateRepo(ctx, pool, reposdb.CreateRepoParams{
		OwnerUserID:   pgtype.Int8{Int64: user.ID, Valid: true},
		Name:          "demo",
		DefaultBranch: "trunk",
		Visibility:    reposdb.RepoVisibilityPublic,
	})
	if err != nil {
		t.Fatalf("CreateRepo: %v", err)
	}

	// PR creation allocates issue numbers, which requires the
	// per-repo counter row to exist.
	iq := issuesdb.New()
	if err := iq.EnsureRepoIssueCounter(ctx, pool, repo.ID); err != nil {
		t.Fatalf("EnsureRepoIssueCounter: %v", err)
	}

	// Real bare repo on disk; -b pins the initial branch name so it
	// matches the DB's DefaultBranch regardless of git's default.
	root := t.TempDir()
	gitDir := filepath.Join(root, "demo.git")
	if out, err := gitCmd("init", "--bare", "-b", "trunk", gitDir).CombinedOutput(); err != nil {
		t.Fatalf("git init --bare: %v (%s)", err, out)
	}

	// Keep orchestrator logs quiet unless the test runs with -v.
	w := io.Discard
	if testing.Verbose() {
		w = os.Stderr
	}
	deps := pulls.Deps{
		Pool:   pool,
		Logger: slog.New(slog.NewTextHandler(w, nil)),
	}
	return fixture{pool: pool, deps: deps, userID: user.ID, repoID: repo.ID, gitDir: gitDir}
}
105
+
106
+// commitOnBranch creates a commit on branch from a temp worktree.
107
+// Returns the new HEAD oid.
108
+func commitOnBranch(t *testing.T, gitDir, branch, msg, file, contents string) string {
109
+	t.Helper()
110
+	wt := t.TempDir()
111
+	// Add a worktree that creates the branch if missing.
112
+	addArgs := []string{"-C", gitDir, "worktree", "add"}
113
+	// If branch doesn't exist yet, create it; otherwise check it out.
114
+	if _, err := gitCmd("-C", gitDir, "show-ref", "--verify", "refs/heads/"+branch).CombinedOutput(); err != nil {
115
+		addArgs = append(addArgs, "-b", branch, wt)
116
+	} else {
117
+		addArgs = append(addArgs, wt, branch)
118
+	}
119
+	if out, err := gitCmd(addArgs...).CombinedOutput(); err != nil {
120
+		t.Fatalf("worktree add %s: %v (%s)", branch, err, out)
121
+	}
122
+	defer func() {
123
+		_ = gitCmd("-C", gitDir, "worktree", "remove", "--force", wt).Run()
124
+	}()
125
+
126
+	if err := os.WriteFile(filepath.Join(wt, file), []byte(contents), 0o644); err != nil { //nolint:gosec
127
+		t.Fatalf("write %s: %v", file, err)
128
+	}
129
+	for _, args := range [][]string{
130
+		{"-C", wt, "config", "user.name", "Alice"},
131
+		{"-C", wt, "config", "user.email", "alice@example.com"},
132
+		{"-C", wt, "add", "."},
133
+		{"-C", wt, "commit", "-m", msg},
134
+	} {
135
+		if out, err := gitCmd(args...).CombinedOutput(); err != nil {
136
+			t.Fatalf("%v: %v (%s)", args, err, out)
137
+		}
138
+	}
139
+	out, err := gitCmd("-C", wt, "rev-parse", "HEAD").Output()
140
+	if err != nil {
141
+		t.Fatalf("rev-parse HEAD: %v", err)
142
+	}
143
+	return strings.TrimSpace(string(out))
144
+}
145
+
146
+func TestCreate_OpensPRWithIssueRow(t *testing.T) {
147
+	f := setup(t)
148
+	commitOnBranch(t, f.gitDir, "trunk", "init", "README.md", "hi\n")
149
+	commitOnBranch(t, f.gitDir, "feature", "add foo", "foo.txt", "foo\n")
150
+
151
+	res, err := pulls.Create(context.Background(), f.deps, pulls.CreateParams{
152
+		RepoID:       f.repoID,
153
+		AuthorUserID: f.userID,
154
+		Title:        "Add foo",
155
+		Body:         "fixes nothing yet",
156
+		BaseRef:      "trunk",
157
+		HeadRef:      "feature",
158
+		GitDir:       f.gitDir,
159
+	})
160
+	if err != nil {
161
+		t.Fatalf("Create: %v", err)
162
+	}
163
+	if res.Issue.Kind != issuesdb.IssueKindPr {
164
+		t.Errorf("issue kind: got %s, want pr", res.Issue.Kind)
165
+	}
166
+	if res.PullRequest.BaseRef != "trunk" || res.PullRequest.HeadRef != "feature" {
167
+		t.Errorf("ref mismatch: %+v", res.PullRequest)
168
+	}
169
+	if res.PullRequest.BaseOid == "" || res.PullRequest.HeadOid == "" {
170
+		t.Errorf("OIDs not snapshotted: %+v", res.PullRequest)
171
+	}
172
+	commits, _ := pullsdb.New().ListPullRequestCommits(context.Background(), f.pool, res.PullRequest.IssueID)
173
+	if len(commits) == 0 {
174
+		t.Errorf("expected commits populated by initial sync")
175
+	}
176
+}
177
+
178
// TestCreate_RejectsSameBranch checks that a PR whose head and base
// name the same branch is refused at creation time.
func TestCreate_RejectsSameBranch(t *testing.T) {
	f := setup(t)
	commitOnBranch(t, f.gitDir, "trunk", "init", "README.md", "hi\n")
	_, err := pulls.Create(context.Background(), f.deps, pulls.CreateParams{
		RepoID: f.repoID, AuthorUserID: f.userID,
		Title: "x", BaseRef: "trunk", HeadRef: "trunk", GitDir: f.gitDir,
	})
	// NOTE(review): this only asserts a non-nil error; consider
	// errors.Is against the same-branch sentinel so another failure
	// mode can't satisfy the test by accident.
	if err == nil {
		t.Fatalf("expected ErrSameBranch, got nil")
	}
}
189
+
190
+func TestMergeability_Clean(t *testing.T) {
191
+	f := setup(t)
192
+	commitOnBranch(t, f.gitDir, "trunk", "init", "README.md", "hi\n")
193
+	commitOnBranch(t, f.gitDir, "feature", "add foo", "foo.txt", "foo\n")
194
+	res, err := pulls.Create(context.Background(), f.deps, pulls.CreateParams{
195
+		RepoID: f.repoID, AuthorUserID: f.userID,
196
+		Title: "x", BaseRef: "trunk", HeadRef: "feature", GitDir: f.gitDir,
197
+	})
198
+	if err != nil {
199
+		t.Fatalf("Create: %v", err)
200
+	}
201
+	if err := pulls.Mergeability(context.Background(), f.deps, f.gitDir, res.PullRequest.IssueID); err != nil {
202
+		t.Fatalf("Mergeability: %v", err)
203
+	}
204
+	pr, _ := pullsdb.New().GetPullRequestByIssueID(context.Background(), f.pool, res.PullRequest.IssueID)
205
+	if pr.MergeableState != pullsdb.PrMergeableStateClean {
206
+		t.Errorf("got %s, want clean", pr.MergeableState)
207
+	}
208
+}
209
+
210
+func TestMergeability_Dirty(t *testing.T) {
211
+	f := setup(t)
212
+	commitOnBranch(t, f.gitDir, "trunk", "init", "shared.txt", "base content\n")
213
+	// Modify shared.txt on trunk.
214
+	commitOnBranch(t, f.gitDir, "trunk", "trunk edit", "shared.txt", "trunk content\n")
215
+	// Branch from earlier trunk and also edit shared.txt → conflict.
216
+	// Create the feature branch from the first trunk commit.
217
+	out, err := gitCmd("-C", f.gitDir, "rev-list", "--reverse", "trunk").Output()
218
+	if err != nil {
219
+		t.Fatalf("rev-list: %v", err)
220
+	}
221
+	firstSHA := strings.SplitN(strings.TrimSpace(string(out)), "\n", 2)[0]
222
+	if out, err := gitCmd("-C", f.gitDir, "branch", "feature", firstSHA).CombinedOutput(); err != nil {
223
+		t.Fatalf("create feature branch: %v (%s)", err, out)
224
+	}
225
+	commitOnBranch(t, f.gitDir, "feature", "feature edit", "shared.txt", "feature content\n")
226
+
227
+	res, err := pulls.Create(context.Background(), f.deps, pulls.CreateParams{
228
+		RepoID: f.repoID, AuthorUserID: f.userID,
229
+		Title: "x", BaseRef: "trunk", HeadRef: "feature", GitDir: f.gitDir,
230
+	})
231
+	if err != nil {
232
+		t.Fatalf("Create: %v", err)
233
+	}
234
+	if err := pulls.Mergeability(context.Background(), f.deps, f.gitDir, res.PullRequest.IssueID); err != nil {
235
+		t.Fatalf("Mergeability: %v", err)
236
+	}
237
+	pr, _ := pullsdb.New().GetPullRequestByIssueID(context.Background(), f.pool, res.PullRequest.IssueID)
238
+	if pr.MergeableState != pullsdb.PrMergeableStateDirty {
239
+		t.Errorf("got %s, want dirty", pr.MergeableState)
240
+	}
241
+}
242
+
243
+func TestMerge_MergeCommit(t *testing.T) {
244
+	f := setup(t)
245
+	commitOnBranch(t, f.gitDir, "trunk", "init", "README.md", "hi\n")
246
+	commitOnBranch(t, f.gitDir, "feature", "add foo", "foo.txt", "foo\n")
247
+	res, err := pulls.Create(context.Background(), f.deps, pulls.CreateParams{
248
+		RepoID: f.repoID, AuthorUserID: f.userID,
249
+		Title: "Add foo", BaseRef: "trunk", HeadRef: "feature", GitDir: f.gitDir,
250
+	})
251
+	if err != nil {
252
+		t.Fatalf("Create: %v", err)
253
+	}
254
+	if err := pulls.Mergeability(context.Background(), f.deps, f.gitDir, res.PullRequest.IssueID); err != nil {
255
+		t.Fatalf("Mergeability: %v", err)
256
+	}
257
+	if err := pulls.Merge(context.Background(), f.deps, pulls.MergeParams{
258
+		PRID: res.PullRequest.IssueID, ActorUserID: f.userID,
259
+		GitDir: f.gitDir, Method: "merge",
260
+	}); err != nil {
261
+		t.Fatalf("Merge: %v", err)
262
+	}
263
+	pr, _ := pullsdb.New().GetPullRequestByIssueID(context.Background(), f.pool, res.PullRequest.IssueID)
264
+	if !pr.MergedAt.Valid {
265
+		t.Errorf("merged_at not set")
266
+	}
267
+	if !pr.MergeCommitSha.Valid {
268
+		t.Errorf("merge_commit_sha not set")
269
+	}
270
+	// Issue side closed?
271
+	iq := issuesdb.New()
272
+	issue, _ := iq.GetIssueByID(context.Background(), f.pool, res.PullRequest.IssueID)
273
+	if issue.State != issuesdb.IssueStateClosed {
274
+		t.Errorf("issue state: got %s, want closed", issue.State)
275
+	}
276
+}
277
+
278
+func TestMerge_RejectsConcurrentDouble(t *testing.T) {
279
+	f := setup(t)
280
+	commitOnBranch(t, f.gitDir, "trunk", "init", "README.md", "hi\n")
281
+	commitOnBranch(t, f.gitDir, "feature", "add foo", "foo.txt", "foo\n")
282
+	res, err := pulls.Create(context.Background(), f.deps, pulls.CreateParams{
283
+		RepoID: f.repoID, AuthorUserID: f.userID,
284
+		Title: "x", BaseRef: "trunk", HeadRef: "feature", GitDir: f.gitDir,
285
+	})
286
+	if err != nil {
287
+		t.Fatalf("Create: %v", err)
288
+	}
289
+	_ = pulls.Mergeability(context.Background(), f.deps, f.gitDir, res.PullRequest.IssueID)
290
+
291
+	var wg sync.WaitGroup
292
+	errs := make([]error, 2)
293
+	for i := 0; i < 2; i++ {
294
+		wg.Add(1)
295
+		go func(i int) {
296
+			defer wg.Done()
297
+			errs[i] = pulls.Merge(context.Background(), f.deps, pulls.MergeParams{
298
+				PRID: res.PullRequest.IssueID, ActorUserID: f.userID,
299
+				GitDir: f.gitDir, Method: "merge",
300
+			})
301
+		}(i)
302
+	}
303
+	wg.Wait()
304
+
305
+	successes := 0
306
+	for _, e := range errs {
307
+		if e == nil {
308
+			successes++
309
+		}
310
+	}
311
+	if successes != 1 {
312
+		t.Errorf("expected exactly one successful merge, got %d (errors: %v, %v)", successes, errs[0], errs[1])
313
+	}
314
+}
315
+
316
+func TestMerge_LinkedIssueAutoClose(t *testing.T) {
317
+	f := setup(t)
318
+	ctx := context.Background()
319
+
320
+	// Create an issue first so the PR body can reference it.
321
+	iq := issuesdb.New()
322
+	num, err := iq.AllocateIssueNumber(ctx, f.pool, f.repoID)
323
+	if err != nil {
324
+		t.Fatalf("AllocateIssueNumber: %v", err)
325
+	}
326
+	issue, err := iq.CreateIssue(ctx, f.pool, issuesdb.CreateIssueParams{
327
+		RepoID:       f.repoID,
328
+		Number:       num,
329
+		Kind:         issuesdb.IssueKindIssue,
330
+		Title:        "bug",
331
+		Body:         "fix me",
332
+		AuthorUserID: pgtype.Int8{Int64: f.userID, Valid: true},
333
+	})
334
+	if err != nil {
335
+		t.Fatalf("CreateIssue: %v", err)
336
+	}
337
+
338
+	commitOnBranch(t, f.gitDir, "trunk", "init", "README.md", "hi\n")
339
+	commitOnBranch(t, f.gitDir, "feature", "add foo", "foo.txt", "foo\n")
340
+
341
+	res, err := pulls.Create(ctx, f.deps, pulls.CreateParams{
342
+		RepoID: f.repoID, AuthorUserID: f.userID,
343
+		Title: "fix the bug", Body: "Fixes #1",
344
+		BaseRef: "trunk", HeadRef: "feature", GitDir: f.gitDir,
345
+	})
346
+	if err != nil {
347
+		t.Fatalf("Create PR: %v", err)
348
+	}
349
+	_ = pulls.Mergeability(ctx, f.deps, f.gitDir, res.PullRequest.IssueID)
350
+
351
+	if err := pulls.Merge(ctx, f.deps, pulls.MergeParams{
352
+		PRID: res.PullRequest.IssueID, ActorUserID: f.userID,
353
+		GitDir: f.gitDir, Method: "squash",
354
+	}); err != nil {
355
+		t.Fatalf("Merge: %v", err)
356
+	}
357
+
358
+	// The pre-existing issue (#1) should now be closed.
359
+	got, err := iq.GetIssueByID(ctx, f.pool, issue.ID)
360
+	if err != nil {
361
+		t.Fatalf("GetIssueByID: %v", err)
362
+	}
363
+	if got.State != issuesdb.IssueStateClosed {
364
+		t.Errorf("linked issue state: got %s, want closed", got.State)
365
+	}
366
+}
internal/pulls/state.goadded
@@ -0,0 +1,47 @@
1
+// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
3
+package pulls
4
+
5
+import (
6
+	"context"
7
+	"errors"
8
+
9
+	"github.com/tenseleyFlow/shithub/internal/issues"
10
+	pullsdb "github.com/tenseleyFlow/shithub/internal/pulls/sqlc"
11
+	repogit "github.com/tenseleyFlow/shithub/internal/repos/git"
12
+)
13
+
14
// SetState delegates to issues.SetState with a couple of PR-specific
// guardrails:
//
//   - any state change on a merged PR is rejected with
//     ErrAlreadyMerged — a merged PR can never be reopened, and its
//     state is final. (It is NOT a silent no-op: callers get a typed
//     error.)
//   - reopening when base or head no longer resolves returns a typed
//     error so the handler can render "branches moved, cannot reopen".
func SetState(ctx context.Context, deps Deps, gitDir string, actorUserID, prID int64, newState string) error {
	q := pullsdb.New()
	pr, err := q.GetPullRequestByIssueID(ctx, deps.Pool, prID)
	if err != nil {
		// All lookup failures collapse to "not found" here — a missing
		// row and a query error are treated the same by callers.
		return ErrPRNotFound
	}
	if pr.MergedAt.Valid {
		return ErrAlreadyMerged
	}
	if newState == "open" {
		// Validate refs still resolve; otherwise reopen would leave a PR
		// pointing at a missing branch which the diff renderer can't
		// handle cleanly.
		if _, err := repogit.ResolveRefOID(ctx, gitDir, pr.BaseRef); err != nil {
			if errors.Is(err, repogit.ErrRefNotFound) {
				return ErrBaseNotFound
			}
			return err
		}
		if _, err := repogit.ResolveRefOID(ctx, gitDir, pr.HeadRef); err != nil {
			if errors.Is(err, repogit.ErrRefNotFound) {
				return ErrHeadNotFound
			}
			return err
		}
	}
	// Transition validation and event emission live in the shared
	// issues state machine.
	return issues.SetState(ctx, issues.Deps{Pool: deps.Pool, Logger: deps.Logger}, actorUserID, prID, newState, "")
}