Add org GitHub import workers

Authored by mfwolffe <wolffemf@dukes.jmu.edu>
SHA: c96e2b4ae84d676627e4c5bc647e07abb3c7e82d (short: c96e2b4)
Parents: c782968
Tree: e6280cc
e6280cccmd/shithubd/worker.gomodified@@ -22,6 +22,7 @@ import ( | |||
| 22 | "github.com/tenseleyFlow/shithub/internal/auth/audit" | 22 | "github.com/tenseleyFlow/shithub/internal/auth/audit" |
| 23 | "github.com/tenseleyFlow/shithub/internal/auth/email" | 23 | "github.com/tenseleyFlow/shithub/internal/auth/email" |
| 24 | "github.com/tenseleyFlow/shithub/internal/auth/secretbox" | 24 | "github.com/tenseleyFlow/shithub/internal/auth/secretbox" |
| 25 | + "github.com/tenseleyFlow/shithub/internal/auth/throttle" | ||
| 25 | "github.com/tenseleyFlow/shithub/internal/infra/config" | 26 | "github.com/tenseleyFlow/shithub/internal/infra/config" |
| 26 | "github.com/tenseleyFlow/shithub/internal/infra/db" | 27 | "github.com/tenseleyFlow/shithub/internal/infra/db" |
| 27 | "github.com/tenseleyFlow/shithub/internal/infra/storage" | 28 | "github.com/tenseleyFlow/shithub/internal/infra/storage" |
@@ -84,6 +85,12 @@ var workerCmd = &cobra.Command{ | |||
| 84 | if err != nil { | 85 | if err != nil { |
| 85 | return fmt.Errorf("object storage: %w", err) | 86 | return fmt.Errorf("object storage: %w", err) |
| 86 | } | 87 | } |
| 88 | + box, boxErr := secretbox.FromBase64(cfg.Auth.TOTPKeyB64) | ||
| 89 | + if boxErr != nil { | ||
| 90 | + logger.Warn("secretbox unavailable for encrypted worker payloads", | ||
| 91 | + "hint", "set Auth.TOTPKeyB64 to a base64 32-byte key", | ||
| 92 | + "error", boxErr) | ||
| 93 | + } | ||
| 87 | 94 | ||
| 88 | p := worker.NewPool(pool, worker.PoolConfig{ | 95 | p := worker.NewPool(pool, worker.PoolConfig{ |
| 89 | Workers: count, | 96 | Workers: count, |
@@ -118,6 +125,12 @@ var workerCmd = &cobra.Command{ | |||
| 118 | p.Register(worker.KindRepoIndexReconcile, jobs.RepoIndexReconcile(jobs.IndexReconcileDeps{ | 125 | p.Register(worker.KindRepoIndexReconcile, jobs.RepoIndexReconcile(jobs.IndexReconcileDeps{ |
| 119 | Pool: pool, Logger: logger, | 126 | Pool: pool, Logger: logger, |
| 120 | })) | 127 | })) |
| 128 | + importDeps := jobs.OrgGitHubImportDeps{ | ||
| 129 | + Pool: pool, RepoFS: rfs, Box: box, Audit: auditRecorder(), | ||
| 130 | + Limiter: throttle.NewLimiter(), Logger: logger, ShithubdPath: shithubdPath, | ||
| 131 | + } | ||
| 132 | + p.Register(worker.KindOrgGitHubImportDiscover, jobs.OrgGitHubImportDiscover(importDeps)) | ||
| 133 | + p.Register(worker.KindOrgGitHubImportRepo, jobs.OrgGitHubImportRepo(importDeps)) | ||
| 121 | 134 | ||
| 122 | notifSender, _ := pickNotifEmailSender(cfg) | 135 | notifSender, _ := pickNotifEmailSender(cfg) |
| 123 | p.Register(worker.KindNotifyFanout, jobs.NotifyFanout(jobs.NotifyFanoutDeps{ | 136 | p.Register(worker.KindNotifyFanout, jobs.NotifyFanout(jobs.NotifyFanoutDeps{ |
@@ -138,11 +151,10 @@ var workerCmd = &cobra.Command{ | |||
| 138 | // purge-old prunes terminal rows past the retention window. | 151 | // purge-old prunes terminal rows past the retention window. |
| 139 | // We reuse the TOTP key as the at-rest secretbox key — both | 152 | // We reuse the TOTP key as the at-rest secretbox key — both |
| 140 | // are encrypted-blob columns in the same trust domain. | 153 | // are encrypted-blob columns in the same trust domain. |
| 141 | - hookBox, hookBoxErr := secretbox.FromBase64(cfg.Auth.TOTPKeyB64) | 154 | + if boxErr != nil { |
| 142 | - if hookBoxErr != nil { | ||
| 143 | logger.Warn("webhook: secretbox unavailable; webhook delivery disabled", | 155 | logger.Warn("webhook: secretbox unavailable; webhook delivery disabled", |
| 144 | "hint", "set Auth.TOTPKeyB64 to a base64 32-byte key", | 156 | "hint", "set Auth.TOTPKeyB64 to a base64 32-byte key", |
| 145 | - "error", hookBoxErr) | 157 | + "error", boxErr) |
| 146 | } else { | 158 | } else { |
| 147 | p.Register(webhook.KindWebhookFanout, jobs.WebhookFanout(jobs.WebhookFanoutDeps{ | 159 | p.Register(webhook.KindWebhookFanout, jobs.WebhookFanout(jobs.WebhookFanoutDeps{ |
| 148 | Pool: pool, Logger: logger, | 160 | Pool: pool, Logger: logger, |
@@ -150,7 +162,7 @@ var workerCmd = &cobra.Command{ | |||
| 150 | p.Register(webhook.KindWebhookDeliver, jobs.WebhookDeliver(jobs.WebhookDeliverDeps{ | 162 | p.Register(webhook.KindWebhookDeliver, jobs.WebhookDeliver(jobs.WebhookDeliverDeps{ |
| 151 | Pool: pool, | 163 | Pool: pool, |
| 152 | Logger: logger, | 164 | Logger: logger, |
| 153 | - SecretBox: hookBox, | 165 | + SecretBox: box, |
| 154 | SSRF: webhook.DefaultSSRFConfig(), | 166 | SSRF: webhook.DefaultSSRFConfig(), |
| 155 | })) | 167 | })) |
| 156 | p.Register(webhook.KindWebhookPurgeOld, jobs.WebhookPurgeOld(jobs.WebhookPurgeOldDeps{ | 168 | p.Register(webhook.KindWebhookPurgeOld, jobs.WebhookPurgeOld(jobs.WebhookPurgeOldDeps{ |
internal/actions/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/admin/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/auth/policy/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/checks/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/issues/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/meta/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/migrationsfs/migrations/0059_org_github_imports.sqladded@@ -0,0 +1,88 @@ | |||
-- SPDX-License-Identifier: AGPL-3.0-or-later
--
-- GitHub organization imports. One parent row tracks discovery/progress;
-- one child row tracks each GitHub repository to create and fetch.

-- +goose Up

CREATE TABLE org_github_imports (
    id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    org_id bigint NOT NULL REFERENCES orgs(id) ON DELETE CASCADE,
    -- Only github.com is supported today; the CHECK below pins it so the
    -- column can be relaxed by a later migration without a data rewrite.
    source_host text NOT NULL DEFAULT 'github.com',
    source_org text NOT NULL,
    -- Kept for attribution; set NULL (not cascaded) if the requester is deleted.
    requested_by_user_id bigint REFERENCES users(id) ON DELETE SET NULL,
    status text NOT NULL DEFAULT 'queued',
    include_private boolean NOT NULL DEFAULT false,
    token_present boolean NOT NULL DEFAULT false,
    -- Encrypted GitHub token (secretbox). Ciphertext and nonce are stored
    -- together or not at all; see the paired CHECK constraints below.
    token_ciphertext bytea,
    token_nonce bytea,
    total_count integer NOT NULL DEFAULT 0,
    last_error text,
    started_at timestamptz,
    completed_at timestamptz,
    created_at timestamptz NOT NULL DEFAULT now(),
    updated_at timestamptz NOT NULL DEFAULT now(),
    CHECK (source_host = 'github.com'),
    CHECK (source_org <> ''),
    CHECK (length(source_org) <= 100),
    CHECK (status IN ('queued', 'discovering', 'importing', 'completed', 'failed')),
    CHECK (total_count >= 0),
    -- Ciphertext and nonce must be set (or unset) together ...
    CHECK ((token_ciphertext IS NULL) = (token_nonce IS NULL)),
    -- ... and a stored ciphertext implies token_present = true.
    CHECK ((token_ciphertext IS NULL) OR token_present)
);

CREATE TABLE org_github_import_repos (
    id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    import_id bigint NOT NULL REFERENCES org_github_imports(id) ON DELETE CASCADE,
    -- GitHub's numeric repo id; nullable because discovery may not have it yet.
    github_id bigint,
    source_full_name text NOT NULL,
    source_name text NOT NULL,
    target_name text NOT NULL,
    clone_url text NOT NULL,
    description text NOT NULL DEFAULT '',
    default_branch text NOT NULL DEFAULT '',
    target_visibility repo_visibility NOT NULL DEFAULT 'public',
    is_private boolean NOT NULL DEFAULT false,
    is_fork boolean NOT NULL DEFAULT false,
    status text NOT NULL DEFAULT 'queued',
    -- Filled once the local repo is created; survives repo deletion as NULL.
    repo_id bigint REFERENCES repos(id) ON DELETE SET NULL,
    last_error text,
    started_at timestamptz,
    completed_at timestamptz,
    created_at timestamptz NOT NULL DEFAULT now(),
    updated_at timestamptz NOT NULL DEFAULT now(),
    CHECK (source_full_name <> ''),
    CHECK (source_name <> ''),
    CHECK (target_name <> ''),
    CHECK (clone_url <> ''),
    CHECK (length(clone_url) <= 2048),
    CHECK (status IN ('queued', 'importing', 'imported', 'skipped', 'failed'))
);

-- One target repo name per import run: prevents duplicate creation jobs.
CREATE UNIQUE INDEX org_github_import_repos_import_target_idx
  ON org_github_import_repos(import_id, target_name);

-- Serves "recent imports for an org" listings.
CREATE INDEX org_github_imports_org_created_idx
  ON org_github_imports(org_id, created_at DESC);

-- Serves per-import progress queries (count rows by status).
CREATE INDEX org_github_import_repos_import_status_idx
  ON org_github_import_repos(import_id, status);

CREATE TRIGGER org_github_imports_set_updated_at
BEFORE UPDATE ON org_github_imports
FOR EACH ROW EXECUTE FUNCTION tg_set_updated_at();

CREATE TRIGGER org_github_import_repos_set_updated_at
BEFORE UPDATE ON org_github_import_repos
FOR EACH ROW EXECUTE FUNCTION tg_set_updated_at();


-- +goose Down

DROP TRIGGER IF EXISTS org_github_import_repos_set_updated_at ON org_github_import_repos;
DROP TRIGGER IF EXISTS org_github_imports_set_updated_at ON org_github_imports;
DROP INDEX IF EXISTS org_github_import_repos_import_status_idx;
DROP INDEX IF EXISTS org_github_imports_org_created_idx;
DROP INDEX IF EXISTS org_github_import_repos_import_target_idx;
DROP TABLE IF EXISTS org_github_import_repos;
DROP TABLE IF EXISTS org_github_imports;
internal/notif/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/orgs/github_import.goadded@@ -0,0 +1,292 @@ | |||
| 1 | +// SPDX-License-Identifier: AGPL-3.0-or-later | ||
| 2 | + | ||
| 3 | +package orgs | ||
| 4 | + | ||
| 5 | +import ( | ||
| 6 | + "bytes" | ||
| 7 | + "context" | ||
| 8 | + "encoding/json" | ||
| 9 | + "errors" | ||
| 10 | + "fmt" | ||
| 11 | + "io" | ||
| 12 | + "log/slog" | ||
| 13 | + "net/http" | ||
| 14 | + "net/url" | ||
| 15 | + "regexp" | ||
| 16 | + "strconv" | ||
| 17 | + "strings" | ||
| 18 | + "time" | ||
| 19 | + | ||
| 20 | + "github.com/jackc/pgx/v5" | ||
| 21 | + "github.com/jackc/pgx/v5/pgtype" | ||
| 22 | + "github.com/jackc/pgx/v5/pgxpool" | ||
| 23 | + | ||
| 24 | + "github.com/tenseleyFlow/shithub/internal/auth/secretbox" | ||
| 25 | + orgsdb "github.com/tenseleyFlow/shithub/internal/orgs/sqlc" | ||
| 26 | + "github.com/tenseleyFlow/shithub/internal/worker" | ||
| 27 | +) | ||
| 28 | + | ||
// GitHubHost is the only supported import source host; the schema enforces
// the same restriction (CHECK source_host = 'github.com' in migration 0059).
const GitHubHost = "github.com"

// Status values. They mirror the CHECK constraints on
// org_github_imports.status and org_github_import_repos.status.
const (
	// Parent import lifecycle.
	ImportStatusQueued      = "queued"
	ImportStatusDiscovering = "discovering"
	ImportStatusImporting   = "importing"
	ImportStatusCompleted   = "completed"
	ImportStatusFailed      = "failed"

	// Per-repository lifecycle.
	ImportRepoStatusQueued    = "queued"
	ImportRepoStatusImporting = "importing"
	ImportRepoStatusImported  = "imported"
	ImportRepoStatusSkipped   = "skipped"
	ImportRepoStatusFailed    = "failed"
)

var (
	// ErrInvalidGitHubOrg is returned by NormalizeGitHubOrg for names that
	// fail GitHub's org-login syntax.
	ErrInvalidGitHubOrg = errors.New("orgs: invalid GitHub organization")
	// ErrImportTokenKeyNeeded is returned when a token must be encrypted or
	// decrypted but no secretbox key is configured.
	ErrImportTokenKeyNeeded = errors.New("orgs: import token encryption key is not configured")
)

// githubOrgRE matches a GitHub org login: a leading alphanumeric followed by
// up to 99 alphanumerics or hyphens — total length <= 100, matching the DB
// CHECK length(source_org) <= 100.
var githubOrgRE = regexp.MustCompile(`^[A-Za-z0-9](?:[A-Za-z0-9-]{0,99})$`)

// ImportDeps wires org-import orchestration.
type ImportDeps struct {
	Pool   *pgxpool.Pool  // database handle; transactions are opened on it
	Box    *secretbox.Box // optional; required only when tokens are stored or read
	Logger *slog.Logger   // optional; used for best-effort warnings
}

// StartGitHubImportParams describes a single org import request.
type StartGitHubImportParams struct {
	OrgID             int64  // destination org id
	SourceOrg         string // GitHub org name or URL; normalized before use
	RequestedByUserID int64  // 0 means unknown (stored as SQL NULL)
	Token             string // optional GitHub token; presence enables include_private
}
| 66 | + | ||
// StartGitHubImport persists a GitHub import request and enqueues discovery.
//
// The source org is validated/normalized first. An optional personal access
// token is encrypted with deps.Box before storage; supplying a token with no
// configured box yields ErrImportTokenKeyNeeded. The import row and its
// discovery job are written in one transaction so a crash cannot leave a row
// without a job (or vice versa). Returns the created import row.
func StartGitHubImport(ctx context.Context, deps ImportDeps, p StartGitHubImportParams) (orgsdb.OrgGithubImport, error) {
	sourceOrg, err := NormalizeGitHubOrg(p.SourceOrg)
	if err != nil {
		return orgsdb.OrgGithubImport{}, err
	}
	token := strings.TrimSpace(p.Token)
	var ciphertext, nonce []byte
	tokenPresent := token != ""
	if tokenPresent {
		if deps.Box == nil {
			return orgsdb.OrgGithubImport{}, ErrImportTokenKeyNeeded
		}
		ciphertext, nonce, err = deps.Box.Seal([]byte(token))
		if err != nil {
			return orgsdb.OrgGithubImport{}, fmt.Errorf("github import: seal token: %w", err)
		}
	}

	tx, err := deps.Pool.Begin(ctx)
	if err != nil {
		return orgsdb.OrgGithubImport{}, err
	}
	// Roll back on any early return; the flag flips only after Commit
	// succeeds, so the deferred Rollback is a no-op on the happy path.
	committed := false
	defer func() {
		if !committed {
			_ = tx.Rollback(ctx)
		}
	}()

	q := orgsdb.New()
	row, err := q.CreateOrgGithubImport(ctx, tx, orgsdb.CreateOrgGithubImportParams{
		OrgID:     p.OrgID,
		SourceOrg: sourceOrg,
		// Zero requester id is stored as SQL NULL (Valid: false).
		RequestedByUserID: pgtype.Int8{Int64: p.RequestedByUserID, Valid: p.RequestedByUserID != 0},
		IncludePrivate:    tokenPresent,
		TokenPresent:      tokenPresent,
		TokenCiphertext:   ciphertext,
		TokenNonce:        nonce,
	})
	if err != nil {
		return orgsdb.OrgGithubImport{}, fmt.Errorf("github import: create: %w", err)
	}
	if _, err := worker.Enqueue(ctx, tx, worker.KindOrgGitHubImportDiscover, map[string]any{
		"import_id": row.ID,
	}, worker.EnqueueOptions{}); err != nil {
		return orgsdb.OrgGithubImport{}, err
	}
	// Worker wake-up is best-effort: a Notify failure is only logged (when a
	// logger is present), never fatal — presumably workers also poll; verify.
	if err := worker.Notify(ctx, tx); err != nil && deps.Logger != nil {
		deps.Logger.WarnContext(ctx, "github import: notify", "error", err, "import_id", row.ID)
	}
	if err := tx.Commit(ctx); err != nil {
		return orgsdb.OrgGithubImport{}, err
	}
	committed = true
	return row, nil
}
| 124 | + | ||
| 125 | +func NormalizeGitHubOrg(raw string) (string, error) { | ||
| 126 | + org := strings.TrimSpace(raw) | ||
| 127 | + org = strings.TrimPrefix(org, "https://github.com/") | ||
| 128 | + org = strings.TrimPrefix(org, "http://github.com/") | ||
| 129 | + org = strings.Trim(org, "/") | ||
| 130 | + if org == "" || strings.Contains(org, "/") || !githubOrgRE.MatchString(org) { | ||
| 131 | + return "", ErrInvalidGitHubOrg | ||
| 132 | + } | ||
| 133 | + return org, nil | ||
| 134 | +} | ||
| 135 | + | ||
| 136 | +func DecryptGitHubImportToken(row orgsdb.OrgGithubImport, box *secretbox.Box) (string, error) { | ||
| 137 | + if len(row.TokenCiphertext) == 0 && len(row.TokenNonce) == 0 { | ||
| 138 | + return "", nil | ||
| 139 | + } | ||
| 140 | + if box == nil { | ||
| 141 | + return "", ErrImportTokenKeyNeeded | ||
| 142 | + } | ||
| 143 | + pt, err := box.Open(row.TokenCiphertext, row.TokenNonce) | ||
| 144 | + if err != nil { | ||
| 145 | + return "", fmt.Errorf("github import: decrypt token: %w", err) | ||
| 146 | + } | ||
| 147 | + return string(pt), nil | ||
| 148 | +} | ||
| 149 | + | ||
// GitHubClient is a minimal GitHub REST API client used for org imports.
// The zero value is usable: ListOrgRepos defaults BaseURL to
// https://api.github.com, HTTPClient to a 30s-timeout client, and the
// User-Agent header to "shithub".
type GitHubClient struct {
	HTTPClient *http.Client // optional; nil selects a default 30s-timeout client
	BaseURL    string       // optional API root override (useful for tests)
	UserAgent  string       // optional User-Agent header value
}

// GitHubRepo is the subset of GitHub's repository object consumed by the
// import pipeline.
type GitHubRepo struct {
	ID            int64  // GitHub's numeric repository id
	Name          string // short name ("repo")
	FullName      string // "org/repo"
	CloneURL      string // clone URL as reported by the API
	Description   string // trimmed; "" when GitHub returns null
	DefaultBranch string // trimmed default branch name
	Private       bool
	Fork          bool
}
| 166 | + | ||
// ListOrgRepos fetches every repository of a GitHub organization via the
// REST API (GET /orgs/{org}/repos), following page-number pagination.
//
// With a token the request is authenticated (Bearer) and type=all is
// requested; without one only public repositories (type=public) are listed.
// Pagination stops at the first page shorter than per_page (100); after 100
// pages (10,000 repos) the call fails rather than loop forever.
func (c GitHubClient) ListOrgRepos(ctx context.Context, org, token string) ([]GitHubRepo, error) {
	org, err := NormalizeGitHubOrg(org)
	if err != nil {
		return nil, err
	}
	base := strings.TrimRight(c.BaseURL, "/")
	if base == "" {
		base = "https://api.github.com"
	}
	client := c.HTTPClient
	if client == nil {
		client = &http.Client{Timeout: 30 * time.Second}
	}
	token = strings.TrimSpace(token)
	repoType := "public"
	if token != "" {
		repoType = "all"
	}
	var out []GitHubRepo
	for page := 1; page <= 100; page++ {
		u, err := url.Parse(base + "/orgs/" + url.PathEscape(org) + "/repos")
		if err != nil {
			return nil, err
		}
		q := u.Query()
		q.Set("type", repoType)
		q.Set("per_page", "100")
		q.Set("page", strconv.Itoa(page))
		// Stable server-side ordering keeps page boundaries deterministic.
		q.Set("sort", "full_name")
		u.RawQuery = q.Encode()

		req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Accept", "application/vnd.github+json")
		req.Header.Set("X-GitHub-Api-Version", "2022-11-28")
		req.Header.Set("User-Agent", userAgent(c.UserAgent))
		if token != "" {
			req.Header.Set("Authorization", "Bearer "+token)
		}
		resp, err := client.Do(req)
		if err != nil {
			return nil, err
		}
		// decodeGitHubRepos closes resp.Body.
		repos, err := decodeGitHubRepos(resp)
		if err != nil {
			return nil, err
		}
		out = append(out, repos...)
		// A short page is the last page.
		if len(repos) < 100 {
			return out, nil
		}
	}
	return nil, fmt.Errorf("github import: too many repositories in %s", org)
}
| 223 | + | ||
| 224 | +func decodeGitHubRepos(resp *http.Response) ([]GitHubRepo, error) { | ||
| 225 | + defer resp.Body.Close() | ||
| 226 | + body, err := io.ReadAll(io.LimitReader(resp.Body, 2<<20)) | ||
| 227 | + if err != nil { | ||
| 228 | + return nil, err | ||
| 229 | + } | ||
| 230 | + if resp.StatusCode < 200 || resp.StatusCode >= 300 { | ||
| 231 | + msg := strings.TrimSpace(string(body)) | ||
| 232 | + if msg == "" { | ||
| 233 | + msg = resp.Status | ||
| 234 | + } | ||
| 235 | + return nil, fmt.Errorf("github import: GitHub API returned %s: %s", resp.Status, msg) | ||
| 236 | + } | ||
| 237 | + var payload []struct { | ||
| 238 | + ID int64 `json:"id"` | ||
| 239 | + Name string `json:"name"` | ||
| 240 | + FullName string `json:"full_name"` | ||
| 241 | + CloneURL string `json:"clone_url"` | ||
| 242 | + Description *string `json:"description"` | ||
| 243 | + DefaultBranch string `json:"default_branch"` | ||
| 244 | + Private bool `json:"private"` | ||
| 245 | + Fork bool `json:"fork"` | ||
| 246 | + } | ||
| 247 | + dec := json.NewDecoder(bytes.NewReader(body)) | ||
| 248 | + if err := dec.Decode(&payload); err != nil { | ||
| 249 | + return nil, err | ||
| 250 | + } | ||
| 251 | + out := make([]GitHubRepo, 0, len(payload)) | ||
| 252 | + for _, r := range payload { | ||
| 253 | + desc := "" | ||
| 254 | + if r.Description != nil { | ||
| 255 | + desc = strings.TrimSpace(*r.Description) | ||
| 256 | + } | ||
| 257 | + out = append(out, GitHubRepo{ | ||
| 258 | + ID: r.ID, | ||
| 259 | + Name: r.Name, | ||
| 260 | + FullName: r.FullName, | ||
| 261 | + CloneURL: r.CloneURL, | ||
| 262 | + Description: desc, | ||
| 263 | + DefaultBranch: strings.TrimSpace(r.DefaultBranch), | ||
| 264 | + Private: r.Private, | ||
| 265 | + Fork: r.Fork, | ||
| 266 | + }) | ||
| 267 | + } | ||
| 268 | + return out, nil | ||
| 269 | +} | ||
| 270 | + | ||
// userAgent resolves the User-Agent header value sent to GitHub: the
// caller-configured string (whitespace-trimmed) when non-empty, otherwise
// the built-in default.
func userAgent(custom string) string {
	if ua := strings.TrimSpace(custom); ua != "" {
		return ua
	}
	return "shithub"
}
| 278 | + | ||
| 279 | +func IsTerminalImportStatus(status string) bool { | ||
| 280 | + return status == ImportStatusCompleted || status == ImportStatusFailed | ||
| 281 | +} | ||
| 282 | + | ||
| 283 | +func IsTerminalImportRepoStatus(status string) bool { | ||
| 284 | + return status == ImportRepoStatusImported || status == ImportRepoStatusSkipped || status == ImportRepoStatusFailed | ||
| 285 | +} | ||
| 286 | + | ||
| 287 | +func IgnoreNoRows(err error) error { | ||
| 288 | + if errors.Is(err, pgx.ErrNoRows) { | ||
| 289 | + return nil | ||
| 290 | + } | ||
| 291 | + return err | ||
| 292 | +} | ||
internal/orgs/queries/github_imports.sqladded@@ -0,0 +1,158 @@ | |||
-- SPDX-License-Identifier: AGPL-3.0-or-later

-- Queries for the org GitHub-import pipeline. Import jobs move
-- queued -> discovering -> importing -> completed|failed; per-repo rows move
-- queued -> importing -> imported|skipped|failed. The encrypted GitHub token
-- (token_ciphertext/token_nonce) is wiped whenever the parent import reaches
-- a terminal state; token_present survives so the UI can still show whether
-- a token was supplied.

-- name: CreateOrgGithubImport :one
-- Insert the parent import job. The requesting user and the token
-- ciphertext/nonce are nullable (sqlc.narg).
INSERT INTO org_github_imports (
    org_id, source_org, requested_by_user_id, include_private,
    token_present, token_ciphertext, token_nonce
) VALUES (
    $1, $2, sqlc.narg(requested_by_user_id)::bigint, $3,
    $4, sqlc.narg(token_ciphertext)::bytea, sqlc.narg(token_nonce)::bytea
)
RETURNING *;

-- name: GetOrgGithubImport :one
SELECT * FROM org_github_imports WHERE id = $1;

-- name: GetOrgGithubImportForOrg :one
-- Org-scoped fetch: callers that authorize by org must use this rather than
-- GetOrgGithubImport so an id from another org cannot be read.
SELECT * FROM org_github_imports
WHERE id = $1 AND org_id = $2;

-- name: ListOrgGithubImportsForOrg :many
SELECT * FROM org_github_imports
WHERE org_id = $1
ORDER BY created_at DESC
LIMIT $2;

-- name: MarkOrgGithubImportDiscovering :exec
-- Status guard: only advances from 'queued' (or re-applies on 'discovering'
-- so a retried discovery job stays idempotent); a terminal or importing row
-- is left untouched. started_at is set once and preserved on retries.
UPDATE org_github_imports
   SET status = 'discovering',
       started_at = COALESCE(started_at, now()),
       last_error = NULL,
       updated_at = now()
 WHERE id = $1
   AND status IN ('queued', 'discovering');

-- name: MarkOrgGithubImportImporting :exec
-- Records the discovered repo total and advances to 'importing'; idempotent
-- for retried jobs via the 'importing' membership in the guard.
UPDATE org_github_imports
   SET status = 'importing',
       total_count = $2,
       started_at = COALESCE(started_at, now()),
       last_error = NULL,
       updated_at = now()
 WHERE id = $1
   AND status IN ('queued', 'discovering', 'importing');

-- name: MarkOrgGithubImportFailed :exec
-- Terminal: records the error and wipes the stored token secret.
-- Unconditional on current status so a stuck job can always be failed.
UPDATE org_github_imports
   SET status = 'failed',
       last_error = $2,
       token_ciphertext = NULL,
       token_nonce = NULL,
       completed_at = COALESCE(completed_at, now()),
       updated_at = now()
 WHERE id = $1;

-- name: MarkOrgGithubImportCompleted :exec
-- Terminal: unconditional completion; wipes the stored token secret.
UPDATE org_github_imports
   SET status = 'completed',
       token_ciphertext = NULL,
       token_nonce = NULL,
       completed_at = COALESCE(completed_at, now()),
       updated_at = now()
 WHERE id = $1;

-- name: MarkOrgGithubImportCompletedIfDone :one
-- Atomic "last worker closes the import": completes only while the import is
-- 'importing' AND no child repo row is still queued/importing. Returns the
-- updated row when the transition happened; yields no row (pgx.ErrNoRows)
-- otherwise, so concurrent per-repo workers can all call this and exactly
-- one observes the completion.
UPDATE org_github_imports AS i
   SET status = 'completed',
       token_ciphertext = NULL,
       token_nonce = NULL,
       completed_at = COALESCE(completed_at, now()),
       updated_at = now()
 WHERE i.id = $1
   AND i.status = 'importing'
   AND NOT EXISTS (
         SELECT 1
           FROM org_github_import_repos
          WHERE import_id = $1
            AND status IN ('queued', 'importing')
   )
RETURNING i.*;

-- name: InsertOrgGithubImportRepo :one
-- Upsert keyed on (import_id, target_name) so re-running discovery refreshes
-- metadata instead of duplicating rows.
-- NOTE(review): the conflict branch deliberately leaves status/repo_id/
-- last_error untouched, so a repo that already imported or failed keeps its
-- outcome across re-discovery — confirm that is the intended retry semantic.
INSERT INTO org_github_import_repos (
    import_id, github_id, source_full_name, source_name, target_name,
    clone_url, description, default_branch, target_visibility,
    is_private, is_fork
) VALUES (
    $1, sqlc.narg(github_id)::bigint, $2, $3, $4,
    $5, $6, $7, $8, $9, $10
)
ON CONFLICT (import_id, target_name) DO UPDATE
   SET github_id = EXCLUDED.github_id,
       source_full_name = EXCLUDED.source_full_name,
       source_name = EXCLUDED.source_name,
       clone_url = EXCLUDED.clone_url,
       description = EXCLUDED.description,
       default_branch = EXCLUDED.default_branch,
       target_visibility = EXCLUDED.target_visibility,
       is_private = EXCLUDED.is_private,
       is_fork = EXCLUDED.is_fork,
       updated_at = now()
RETURNING *;

-- name: GetOrgGithubImportRepo :one
SELECT * FROM org_github_import_repos WHERE id = $1;

-- name: ListOrgGithubImportRepos :many
SELECT * FROM org_github_import_repos
WHERE import_id = $1
ORDER BY source_name ASC;

-- name: MarkOrgGithubImportRepoImporting :exec
-- Guarded to 'queued' only: a repo already claimed, or already terminal,
-- is not re-entered.
UPDATE org_github_import_repos
   SET status = 'importing',
       started_at = COALESCE(started_at, now()),
       last_error = NULL,
       updated_at = now()
 WHERE id = $1
   AND status = 'queued';

-- name: MarkOrgGithubImportRepoImported :exec
-- Terminal success: records the created repo's id.
UPDATE org_github_import_repos
   SET status = 'imported',
       repo_id = $2,
       last_error = NULL,
       completed_at = COALESCE(completed_at, now()),
       updated_at = now()
 WHERE id = $1;

-- name: MarkOrgGithubImportRepoSkipped :exec
-- Terminal skip: last_error carries the human-readable skip reason.
UPDATE org_github_import_repos
   SET status = 'skipped',
       last_error = $2,
       completed_at = COALESCE(completed_at, now()),
       updated_at = now()
 WHERE id = $1;

-- name: MarkOrgGithubImportRepoFailed :exec
-- Terminal failure. repo_id is optional: when the target repo was created
-- before the failure its id can be recorded; otherwise any previously set
-- repo_id is preserved (COALESCE with the existing column).
UPDATE org_github_import_repos
   SET status = 'failed',
       repo_id = COALESCE(sqlc.narg(repo_id)::bigint, repo_id),
       last_error = $2,
       completed_at = COALESCE(completed_at, now()),
       updated_at = now()
 WHERE id = $1;

-- name: GetOrgGithubImportProgress :one
-- Parent row plus per-status child counts in one round trip, using aggregate
-- FILTER clauses over a LEFT JOIN (zero counts when no repos discovered yet).
-- GROUP BY i.id is sufficient because id is the table's primary key.
SELECT
    i.*,
    count(r.id)::integer AS discovered_count,
    count(r.id) FILTER (WHERE r.status = 'queued')::integer AS queued_count,
    count(r.id) FILTER (WHERE r.status = 'importing')::integer AS importing_count,
    count(r.id) FILTER (WHERE r.status = 'imported')::integer AS imported_count,
    count(r.id) FILTER (WHERE r.status = 'skipped')::integer AS skipped_count,
    count(r.id) FILTER (WHERE r.status = 'failed')::integer AS failed_count
FROM org_github_imports i
LEFT JOIN org_github_import_repos r ON r.import_id = i.id
WHERE i.id = $1 AND i.org_id = $2
GROUP BY i.id;
internal/orgs/sqlc/github_imports.sql.goadded@@ -0,0 +1,595 @@ | |||
| 1 | +// Code generated by sqlc. DO NOT EDIT. | ||
| 2 | +// versions: | ||
| 3 | +// sqlc v1.31.1 | ||
| 4 | +// source: github_imports.sql | ||
| 5 | + | ||
| 6 | +package orgsdb | ||
| 7 | + | ||
| 8 | +import ( | ||
| 9 | + "context" | ||
| 10 | + | ||
| 11 | + "github.com/jackc/pgx/v5/pgtype" | ||
| 12 | +) | ||
| 13 | + | ||
| 14 | +const createOrgGithubImport = `-- name: CreateOrgGithubImport :one | ||
| 15 | + | ||
| 16 | +INSERT INTO org_github_imports ( | ||
| 17 | + org_id, source_org, requested_by_user_id, include_private, | ||
| 18 | + token_present, token_ciphertext, token_nonce | ||
| 19 | +) VALUES ( | ||
| 20 | + $1, $2, $5::bigint, $3, | ||
| 21 | + $4, $6::bytea, $7::bytea | ||
| 22 | +) | ||
| 23 | +RETURNING id, org_id, source_host, source_org, requested_by_user_id, status, include_private, token_present, token_ciphertext, token_nonce, total_count, last_error, started_at, completed_at, created_at, updated_at | ||
| 24 | +` | ||
| 25 | + | ||
| 26 | +type CreateOrgGithubImportParams struct { | ||
| 27 | + OrgID int64 | ||
| 28 | + SourceOrg string | ||
| 29 | + IncludePrivate bool | ||
| 30 | + TokenPresent bool | ||
| 31 | + RequestedByUserID pgtype.Int8 | ||
| 32 | + TokenCiphertext []byte | ||
| 33 | + TokenNonce []byte | ||
| 34 | +} | ||
| 35 | + | ||
| 36 | +// SPDX-License-Identifier: AGPL-3.0-or-later | ||
| 37 | +func (q *Queries) CreateOrgGithubImport(ctx context.Context, db DBTX, arg CreateOrgGithubImportParams) (OrgGithubImport, error) { | ||
| 38 | + row := db.QueryRow(ctx, createOrgGithubImport, | ||
| 39 | + arg.OrgID, | ||
| 40 | + arg.SourceOrg, | ||
| 41 | + arg.IncludePrivate, | ||
| 42 | + arg.TokenPresent, | ||
| 43 | + arg.RequestedByUserID, | ||
| 44 | + arg.TokenCiphertext, | ||
| 45 | + arg.TokenNonce, | ||
| 46 | + ) | ||
| 47 | + var i OrgGithubImport | ||
| 48 | + err := row.Scan( | ||
| 49 | + &i.ID, | ||
| 50 | + &i.OrgID, | ||
| 51 | + &i.SourceHost, | ||
| 52 | + &i.SourceOrg, | ||
| 53 | + &i.RequestedByUserID, | ||
| 54 | + &i.Status, | ||
| 55 | + &i.IncludePrivate, | ||
| 56 | + &i.TokenPresent, | ||
| 57 | + &i.TokenCiphertext, | ||
| 58 | + &i.TokenNonce, | ||
| 59 | + &i.TotalCount, | ||
| 60 | + &i.LastError, | ||
| 61 | + &i.StartedAt, | ||
| 62 | + &i.CompletedAt, | ||
| 63 | + &i.CreatedAt, | ||
| 64 | + &i.UpdatedAt, | ||
| 65 | + ) | ||
| 66 | + return i, err | ||
| 67 | +} | ||
| 68 | + | ||
| 69 | +const getOrgGithubImport = `-- name: GetOrgGithubImport :one | ||
| 70 | +SELECT id, org_id, source_host, source_org, requested_by_user_id, status, include_private, token_present, token_ciphertext, token_nonce, total_count, last_error, started_at, completed_at, created_at, updated_at FROM org_github_imports WHERE id = $1 | ||
| 71 | +` | ||
| 72 | + | ||
| 73 | +func (q *Queries) GetOrgGithubImport(ctx context.Context, db DBTX, id int64) (OrgGithubImport, error) { | ||
| 74 | + row := db.QueryRow(ctx, getOrgGithubImport, id) | ||
| 75 | + var i OrgGithubImport | ||
| 76 | + err := row.Scan( | ||
| 77 | + &i.ID, | ||
| 78 | + &i.OrgID, | ||
| 79 | + &i.SourceHost, | ||
| 80 | + &i.SourceOrg, | ||
| 81 | + &i.RequestedByUserID, | ||
| 82 | + &i.Status, | ||
| 83 | + &i.IncludePrivate, | ||
| 84 | + &i.TokenPresent, | ||
| 85 | + &i.TokenCiphertext, | ||
| 86 | + &i.TokenNonce, | ||
| 87 | + &i.TotalCount, | ||
| 88 | + &i.LastError, | ||
| 89 | + &i.StartedAt, | ||
| 90 | + &i.CompletedAt, | ||
| 91 | + &i.CreatedAt, | ||
| 92 | + &i.UpdatedAt, | ||
| 93 | + ) | ||
| 94 | + return i, err | ||
| 95 | +} | ||
| 96 | + | ||
| 97 | +const getOrgGithubImportForOrg = `-- name: GetOrgGithubImportForOrg :one | ||
| 98 | +SELECT id, org_id, source_host, source_org, requested_by_user_id, status, include_private, token_present, token_ciphertext, token_nonce, total_count, last_error, started_at, completed_at, created_at, updated_at FROM org_github_imports | ||
| 99 | +WHERE id = $1 AND org_id = $2 | ||
| 100 | +` | ||
| 101 | + | ||
| 102 | +type GetOrgGithubImportForOrgParams struct { | ||
| 103 | + ID int64 | ||
| 104 | + OrgID int64 | ||
| 105 | +} | ||
| 106 | + | ||
| 107 | +func (q *Queries) GetOrgGithubImportForOrg(ctx context.Context, db DBTX, arg GetOrgGithubImportForOrgParams) (OrgGithubImport, error) { | ||
| 108 | + row := db.QueryRow(ctx, getOrgGithubImportForOrg, arg.ID, arg.OrgID) | ||
| 109 | + var i OrgGithubImport | ||
| 110 | + err := row.Scan( | ||
| 111 | + &i.ID, | ||
| 112 | + &i.OrgID, | ||
| 113 | + &i.SourceHost, | ||
| 114 | + &i.SourceOrg, | ||
| 115 | + &i.RequestedByUserID, | ||
| 116 | + &i.Status, | ||
| 117 | + &i.IncludePrivate, | ||
| 118 | + &i.TokenPresent, | ||
| 119 | + &i.TokenCiphertext, | ||
| 120 | + &i.TokenNonce, | ||
| 121 | + &i.TotalCount, | ||
| 122 | + &i.LastError, | ||
| 123 | + &i.StartedAt, | ||
| 124 | + &i.CompletedAt, | ||
| 125 | + &i.CreatedAt, | ||
| 126 | + &i.UpdatedAt, | ||
| 127 | + ) | ||
| 128 | + return i, err | ||
| 129 | +} | ||
| 130 | + | ||
| 131 | +const getOrgGithubImportProgress = `-- name: GetOrgGithubImportProgress :one | ||
| 132 | +SELECT | ||
| 133 | + i.id, i.org_id, i.source_host, i.source_org, i.requested_by_user_id, i.status, i.include_private, i.token_present, i.token_ciphertext, i.token_nonce, i.total_count, i.last_error, i.started_at, i.completed_at, i.created_at, i.updated_at, | ||
| 134 | + count(r.id)::integer AS discovered_count, | ||
| 135 | + count(r.id) FILTER (WHERE r.status = 'queued')::integer AS queued_count, | ||
| 136 | + count(r.id) FILTER (WHERE r.status = 'importing')::integer AS importing_count, | ||
| 137 | + count(r.id) FILTER (WHERE r.status = 'imported')::integer AS imported_count, | ||
| 138 | + count(r.id) FILTER (WHERE r.status = 'skipped')::integer AS skipped_count, | ||
| 139 | + count(r.id) FILTER (WHERE r.status = 'failed')::integer AS failed_count | ||
| 140 | +FROM org_github_imports i | ||
| 141 | +LEFT JOIN org_github_import_repos r ON r.import_id = i.id | ||
| 142 | +WHERE i.id = $1 AND i.org_id = $2 | ||
| 143 | +GROUP BY i.id | ||
| 144 | +` | ||
| 145 | + | ||
| 146 | +type GetOrgGithubImportProgressParams struct { | ||
| 147 | + ID int64 | ||
| 148 | + OrgID int64 | ||
| 149 | +} | ||
| 150 | + | ||
| 151 | +type GetOrgGithubImportProgressRow struct { | ||
| 152 | + ID int64 | ||
| 153 | + OrgID int64 | ||
| 154 | + SourceHost string | ||
| 155 | + SourceOrg string | ||
| 156 | + RequestedByUserID pgtype.Int8 | ||
| 157 | + Status string | ||
| 158 | + IncludePrivate bool | ||
| 159 | + TokenPresent bool | ||
| 160 | + TokenCiphertext []byte | ||
| 161 | + TokenNonce []byte | ||
| 162 | + TotalCount int32 | ||
| 163 | + LastError pgtype.Text | ||
| 164 | + StartedAt pgtype.Timestamptz | ||
| 165 | + CompletedAt pgtype.Timestamptz | ||
| 166 | + CreatedAt pgtype.Timestamptz | ||
| 167 | + UpdatedAt pgtype.Timestamptz | ||
| 168 | + DiscoveredCount int32 | ||
| 169 | + QueuedCount int32 | ||
| 170 | + ImportingCount int32 | ||
| 171 | + ImportedCount int32 | ||
| 172 | + SkippedCount int32 | ||
| 173 | + FailedCount int32 | ||
| 174 | +} | ||
| 175 | + | ||
| 176 | +func (q *Queries) GetOrgGithubImportProgress(ctx context.Context, db DBTX, arg GetOrgGithubImportProgressParams) (GetOrgGithubImportProgressRow, error) { | ||
| 177 | + row := db.QueryRow(ctx, getOrgGithubImportProgress, arg.ID, arg.OrgID) | ||
| 178 | + var i GetOrgGithubImportProgressRow | ||
| 179 | + err := row.Scan( | ||
| 180 | + &i.ID, | ||
| 181 | + &i.OrgID, | ||
| 182 | + &i.SourceHost, | ||
| 183 | + &i.SourceOrg, | ||
| 184 | + &i.RequestedByUserID, | ||
| 185 | + &i.Status, | ||
| 186 | + &i.IncludePrivate, | ||
| 187 | + &i.TokenPresent, | ||
| 188 | + &i.TokenCiphertext, | ||
| 189 | + &i.TokenNonce, | ||
| 190 | + &i.TotalCount, | ||
| 191 | + &i.LastError, | ||
| 192 | + &i.StartedAt, | ||
| 193 | + &i.CompletedAt, | ||
| 194 | + &i.CreatedAt, | ||
| 195 | + &i.UpdatedAt, | ||
| 196 | + &i.DiscoveredCount, | ||
| 197 | + &i.QueuedCount, | ||
| 198 | + &i.ImportingCount, | ||
| 199 | + &i.ImportedCount, | ||
| 200 | + &i.SkippedCount, | ||
| 201 | + &i.FailedCount, | ||
| 202 | + ) | ||
| 203 | + return i, err | ||
| 204 | +} | ||
| 205 | + | ||
| 206 | +const getOrgGithubImportRepo = `-- name: GetOrgGithubImportRepo :one | ||
| 207 | +SELECT id, import_id, github_id, source_full_name, source_name, target_name, clone_url, description, default_branch, target_visibility, is_private, is_fork, status, repo_id, last_error, started_at, completed_at, created_at, updated_at FROM org_github_import_repos WHERE id = $1 | ||
| 208 | +` | ||
| 209 | + | ||
| 210 | +func (q *Queries) GetOrgGithubImportRepo(ctx context.Context, db DBTX, id int64) (OrgGithubImportRepo, error) { | ||
| 211 | + row := db.QueryRow(ctx, getOrgGithubImportRepo, id) | ||
| 212 | + var i OrgGithubImportRepo | ||
| 213 | + err := row.Scan( | ||
| 214 | + &i.ID, | ||
| 215 | + &i.ImportID, | ||
| 216 | + &i.GithubID, | ||
| 217 | + &i.SourceFullName, | ||
| 218 | + &i.SourceName, | ||
| 219 | + &i.TargetName, | ||
| 220 | + &i.CloneUrl, | ||
| 221 | + &i.Description, | ||
| 222 | + &i.DefaultBranch, | ||
| 223 | + &i.TargetVisibility, | ||
| 224 | + &i.IsPrivate, | ||
| 225 | + &i.IsFork, | ||
| 226 | + &i.Status, | ||
| 227 | + &i.RepoID, | ||
| 228 | + &i.LastError, | ||
| 229 | + &i.StartedAt, | ||
| 230 | + &i.CompletedAt, | ||
| 231 | + &i.CreatedAt, | ||
| 232 | + &i.UpdatedAt, | ||
| 233 | + ) | ||
| 234 | + return i, err | ||
| 235 | +} | ||
| 236 | + | ||
| 237 | +const insertOrgGithubImportRepo = `-- name: InsertOrgGithubImportRepo :one | ||
| 238 | +INSERT INTO org_github_import_repos ( | ||
| 239 | + import_id, github_id, source_full_name, source_name, target_name, | ||
| 240 | + clone_url, description, default_branch, target_visibility, | ||
| 241 | + is_private, is_fork | ||
| 242 | +) VALUES ( | ||
| 243 | + $1, $11::bigint, $2, $3, $4, | ||
| 244 | + $5, $6, $7, $8, $9, $10 | ||
| 245 | +) | ||
| 246 | +ON CONFLICT (import_id, target_name) DO UPDATE | ||
| 247 | + SET github_id = EXCLUDED.github_id, | ||
| 248 | + source_full_name = EXCLUDED.source_full_name, | ||
| 249 | + source_name = EXCLUDED.source_name, | ||
| 250 | + clone_url = EXCLUDED.clone_url, | ||
| 251 | + description = EXCLUDED.description, | ||
| 252 | + default_branch = EXCLUDED.default_branch, | ||
| 253 | + target_visibility = EXCLUDED.target_visibility, | ||
| 254 | + is_private = EXCLUDED.is_private, | ||
| 255 | + is_fork = EXCLUDED.is_fork, | ||
| 256 | + updated_at = now() | ||
| 257 | +RETURNING id, import_id, github_id, source_full_name, source_name, target_name, clone_url, description, default_branch, target_visibility, is_private, is_fork, status, repo_id, last_error, started_at, completed_at, created_at, updated_at | ||
| 258 | +` | ||
| 259 | + | ||
| 260 | +type InsertOrgGithubImportRepoParams struct { | ||
| 261 | + ImportID int64 | ||
| 262 | + SourceFullName string | ||
| 263 | + SourceName string | ||
| 264 | + TargetName string | ||
| 265 | + CloneUrl string | ||
| 266 | + Description string | ||
| 267 | + DefaultBranch string | ||
| 268 | + TargetVisibility RepoVisibility | ||
| 269 | + IsPrivate bool | ||
| 270 | + IsFork bool | ||
| 271 | + GithubID pgtype.Int8 | ||
| 272 | +} | ||
| 273 | + | ||
| 274 | +func (q *Queries) InsertOrgGithubImportRepo(ctx context.Context, db DBTX, arg InsertOrgGithubImportRepoParams) (OrgGithubImportRepo, error) { | ||
| 275 | + row := db.QueryRow(ctx, insertOrgGithubImportRepo, | ||
| 276 | + arg.ImportID, | ||
| 277 | + arg.SourceFullName, | ||
| 278 | + arg.SourceName, | ||
| 279 | + arg.TargetName, | ||
| 280 | + arg.CloneUrl, | ||
| 281 | + arg.Description, | ||
| 282 | + arg.DefaultBranch, | ||
| 283 | + arg.TargetVisibility, | ||
| 284 | + arg.IsPrivate, | ||
| 285 | + arg.IsFork, | ||
| 286 | + arg.GithubID, | ||
| 287 | + ) | ||
| 288 | + var i OrgGithubImportRepo | ||
| 289 | + err := row.Scan( | ||
| 290 | + &i.ID, | ||
| 291 | + &i.ImportID, | ||
| 292 | + &i.GithubID, | ||
| 293 | + &i.SourceFullName, | ||
| 294 | + &i.SourceName, | ||
| 295 | + &i.TargetName, | ||
| 296 | + &i.CloneUrl, | ||
| 297 | + &i.Description, | ||
| 298 | + &i.DefaultBranch, | ||
| 299 | + &i.TargetVisibility, | ||
| 300 | + &i.IsPrivate, | ||
| 301 | + &i.IsFork, | ||
| 302 | + &i.Status, | ||
| 303 | + &i.RepoID, | ||
| 304 | + &i.LastError, | ||
| 305 | + &i.StartedAt, | ||
| 306 | + &i.CompletedAt, | ||
| 307 | + &i.CreatedAt, | ||
| 308 | + &i.UpdatedAt, | ||
| 309 | + ) | ||
| 310 | + return i, err | ||
| 311 | +} | ||
| 312 | + | ||
| 313 | +const listOrgGithubImportRepos = `-- name: ListOrgGithubImportRepos :many | ||
| 314 | +SELECT id, import_id, github_id, source_full_name, source_name, target_name, clone_url, description, default_branch, target_visibility, is_private, is_fork, status, repo_id, last_error, started_at, completed_at, created_at, updated_at FROM org_github_import_repos | ||
| 315 | +WHERE import_id = $1 | ||
| 316 | +ORDER BY source_name ASC | ||
| 317 | +` | ||
| 318 | + | ||
| 319 | +func (q *Queries) ListOrgGithubImportRepos(ctx context.Context, db DBTX, importID int64) ([]OrgGithubImportRepo, error) { | ||
| 320 | + rows, err := db.Query(ctx, listOrgGithubImportRepos, importID) | ||
| 321 | + if err != nil { | ||
| 322 | + return nil, err | ||
| 323 | + } | ||
| 324 | + defer rows.Close() | ||
| 325 | + items := []OrgGithubImportRepo{} | ||
| 326 | + for rows.Next() { | ||
| 327 | + var i OrgGithubImportRepo | ||
| 328 | + if err := rows.Scan( | ||
| 329 | + &i.ID, | ||
| 330 | + &i.ImportID, | ||
| 331 | + &i.GithubID, | ||
| 332 | + &i.SourceFullName, | ||
| 333 | + &i.SourceName, | ||
| 334 | + &i.TargetName, | ||
| 335 | + &i.CloneUrl, | ||
| 336 | + &i.Description, | ||
| 337 | + &i.DefaultBranch, | ||
| 338 | + &i.TargetVisibility, | ||
| 339 | + &i.IsPrivate, | ||
| 340 | + &i.IsFork, | ||
| 341 | + &i.Status, | ||
| 342 | + &i.RepoID, | ||
| 343 | + &i.LastError, | ||
| 344 | + &i.StartedAt, | ||
| 345 | + &i.CompletedAt, | ||
| 346 | + &i.CreatedAt, | ||
| 347 | + &i.UpdatedAt, | ||
| 348 | + ); err != nil { | ||
| 349 | + return nil, err | ||
| 350 | + } | ||
| 351 | + items = append(items, i) | ||
| 352 | + } | ||
| 353 | + if err := rows.Err(); err != nil { | ||
| 354 | + return nil, err | ||
| 355 | + } | ||
| 356 | + return items, nil | ||
| 357 | +} | ||
| 358 | + | ||
| 359 | +const listOrgGithubImportsForOrg = `-- name: ListOrgGithubImportsForOrg :many | ||
| 360 | +SELECT id, org_id, source_host, source_org, requested_by_user_id, status, include_private, token_present, token_ciphertext, token_nonce, total_count, last_error, started_at, completed_at, created_at, updated_at FROM org_github_imports | ||
| 361 | +WHERE org_id = $1 | ||
| 362 | +ORDER BY created_at DESC | ||
| 363 | +LIMIT $2 | ||
| 364 | +` | ||
| 365 | + | ||
| 366 | +type ListOrgGithubImportsForOrgParams struct { | ||
| 367 | + OrgID int64 | ||
| 368 | + Limit int32 | ||
| 369 | +} | ||
| 370 | + | ||
| 371 | +func (q *Queries) ListOrgGithubImportsForOrg(ctx context.Context, db DBTX, arg ListOrgGithubImportsForOrgParams) ([]OrgGithubImport, error) { | ||
| 372 | + rows, err := db.Query(ctx, listOrgGithubImportsForOrg, arg.OrgID, arg.Limit) | ||
| 373 | + if err != nil { | ||
| 374 | + return nil, err | ||
| 375 | + } | ||
| 376 | + defer rows.Close() | ||
| 377 | + items := []OrgGithubImport{} | ||
| 378 | + for rows.Next() { | ||
| 379 | + var i OrgGithubImport | ||
| 380 | + if err := rows.Scan( | ||
| 381 | + &i.ID, | ||
| 382 | + &i.OrgID, | ||
| 383 | + &i.SourceHost, | ||
| 384 | + &i.SourceOrg, | ||
| 385 | + &i.RequestedByUserID, | ||
| 386 | + &i.Status, | ||
| 387 | + &i.IncludePrivate, | ||
| 388 | + &i.TokenPresent, | ||
| 389 | + &i.TokenCiphertext, | ||
| 390 | + &i.TokenNonce, | ||
| 391 | + &i.TotalCount, | ||
| 392 | + &i.LastError, | ||
| 393 | + &i.StartedAt, | ||
| 394 | + &i.CompletedAt, | ||
| 395 | + &i.CreatedAt, | ||
| 396 | + &i.UpdatedAt, | ||
| 397 | + ); err != nil { | ||
| 398 | + return nil, err | ||
| 399 | + } | ||
| 400 | + items = append(items, i) | ||
| 401 | + } | ||
| 402 | + if err := rows.Err(); err != nil { | ||
| 403 | + return nil, err | ||
| 404 | + } | ||
| 405 | + return items, nil | ||
| 406 | +} | ||
| 407 | + | ||
| 408 | +const markOrgGithubImportCompleted = `-- name: MarkOrgGithubImportCompleted :exec | ||
| 409 | +UPDATE org_github_imports | ||
| 410 | + SET status = 'completed', | ||
| 411 | + token_ciphertext = NULL, | ||
| 412 | + token_nonce = NULL, | ||
| 413 | + completed_at = COALESCE(completed_at, now()), | ||
| 414 | + updated_at = now() | ||
| 415 | + WHERE id = $1 | ||
| 416 | +` | ||
| 417 | + | ||
| 418 | +func (q *Queries) MarkOrgGithubImportCompleted(ctx context.Context, db DBTX, id int64) error { | ||
| 419 | + _, err := db.Exec(ctx, markOrgGithubImportCompleted, id) | ||
| 420 | + return err | ||
| 421 | +} | ||
| 422 | + | ||
| 423 | +const markOrgGithubImportCompletedIfDone = `-- name: MarkOrgGithubImportCompletedIfDone :one | ||
| 424 | +UPDATE org_github_imports AS i | ||
| 425 | + SET status = 'completed', | ||
| 426 | + token_ciphertext = NULL, | ||
| 427 | + token_nonce = NULL, | ||
| 428 | + completed_at = COALESCE(completed_at, now()), | ||
| 429 | + updated_at = now() | ||
| 430 | + WHERE i.id = $1 | ||
| 431 | + AND i.status = 'importing' | ||
| 432 | + AND NOT EXISTS ( | ||
| 433 | + SELECT 1 | ||
| 434 | + FROM org_github_import_repos | ||
| 435 | + WHERE import_id = $1 | ||
| 436 | + AND status IN ('queued', 'importing') | ||
| 437 | + ) | ||
| 438 | +RETURNING i.id, i.org_id, i.source_host, i.source_org, i.requested_by_user_id, i.status, i.include_private, i.token_present, i.token_ciphertext, i.token_nonce, i.total_count, i.last_error, i.started_at, i.completed_at, i.created_at, i.updated_at | ||
| 439 | +` | ||
| 440 | + | ||
| 441 | +func (q *Queries) MarkOrgGithubImportCompletedIfDone(ctx context.Context, db DBTX, id int64) (OrgGithubImport, error) { | ||
| 442 | + row := db.QueryRow(ctx, markOrgGithubImportCompletedIfDone, id) | ||
| 443 | + var i OrgGithubImport | ||
| 444 | + err := row.Scan( | ||
| 445 | + &i.ID, | ||
| 446 | + &i.OrgID, | ||
| 447 | + &i.SourceHost, | ||
| 448 | + &i.SourceOrg, | ||
| 449 | + &i.RequestedByUserID, | ||
| 450 | + &i.Status, | ||
| 451 | + &i.IncludePrivate, | ||
| 452 | + &i.TokenPresent, | ||
| 453 | + &i.TokenCiphertext, | ||
| 454 | + &i.TokenNonce, | ||
| 455 | + &i.TotalCount, | ||
| 456 | + &i.LastError, | ||
| 457 | + &i.StartedAt, | ||
| 458 | + &i.CompletedAt, | ||
| 459 | + &i.CreatedAt, | ||
| 460 | + &i.UpdatedAt, | ||
| 461 | + ) | ||
| 462 | + return i, err | ||
| 463 | +} | ||
| 464 | + | ||
| 465 | +const markOrgGithubImportDiscovering = `-- name: MarkOrgGithubImportDiscovering :exec | ||
| 466 | +UPDATE org_github_imports | ||
| 467 | + SET status = 'discovering', | ||
| 468 | + started_at = COALESCE(started_at, now()), | ||
| 469 | + last_error = NULL, | ||
| 470 | + updated_at = now() | ||
| 471 | + WHERE id = $1 | ||
| 472 | + AND status IN ('queued', 'discovering') | ||
| 473 | +` | ||
| 474 | + | ||
| 475 | +func (q *Queries) MarkOrgGithubImportDiscovering(ctx context.Context, db DBTX, id int64) error { | ||
| 476 | + _, err := db.Exec(ctx, markOrgGithubImportDiscovering, id) | ||
| 477 | + return err | ||
| 478 | +} | ||
| 479 | + | ||
| 480 | +const markOrgGithubImportFailed = `-- name: MarkOrgGithubImportFailed :exec | ||
| 481 | +UPDATE org_github_imports | ||
| 482 | + SET status = 'failed', | ||
| 483 | + last_error = $2, | ||
| 484 | + token_ciphertext = NULL, | ||
| 485 | + token_nonce = NULL, | ||
| 486 | + completed_at = COALESCE(completed_at, now()), | ||
| 487 | + updated_at = now() | ||
| 488 | + WHERE id = $1 | ||
| 489 | +` | ||
| 490 | + | ||
| 491 | +type MarkOrgGithubImportFailedParams struct { | ||
| 492 | + ID int64 | ||
| 493 | + LastError pgtype.Text | ||
| 494 | +} | ||
| 495 | + | ||
| 496 | +func (q *Queries) MarkOrgGithubImportFailed(ctx context.Context, db DBTX, arg MarkOrgGithubImportFailedParams) error { | ||
| 497 | + _, err := db.Exec(ctx, markOrgGithubImportFailed, arg.ID, arg.LastError) | ||
| 498 | + return err | ||
| 499 | +} | ||
| 500 | + | ||
| 501 | +const markOrgGithubImportImporting = `-- name: MarkOrgGithubImportImporting :exec | ||
| 502 | +UPDATE org_github_imports | ||
| 503 | + SET status = 'importing', | ||
| 504 | + total_count = $2, | ||
| 505 | + started_at = COALESCE(started_at, now()), | ||
| 506 | + last_error = NULL, | ||
| 507 | + updated_at = now() | ||
| 508 | + WHERE id = $1 | ||
| 509 | + AND status IN ('queued', 'discovering', 'importing') | ||
| 510 | +` | ||
| 511 | + | ||
| 512 | +type MarkOrgGithubImportImportingParams struct { | ||
| 513 | + ID int64 | ||
| 514 | + TotalCount int32 | ||
| 515 | +} | ||
| 516 | + | ||
| 517 | +func (q *Queries) MarkOrgGithubImportImporting(ctx context.Context, db DBTX, arg MarkOrgGithubImportImportingParams) error { | ||
| 518 | + _, err := db.Exec(ctx, markOrgGithubImportImporting, arg.ID, arg.TotalCount) | ||
| 519 | + return err | ||
| 520 | +} | ||
| 521 | + | ||
| 522 | +const markOrgGithubImportRepoFailed = `-- name: MarkOrgGithubImportRepoFailed :exec | ||
| 523 | +UPDATE org_github_import_repos | ||
| 524 | + SET status = 'failed', | ||
| 525 | + repo_id = COALESCE($3::bigint, repo_id), | ||
| 526 | + last_error = $2, | ||
| 527 | + completed_at = COALESCE(completed_at, now()), | ||
| 528 | + updated_at = now() | ||
| 529 | + WHERE id = $1 | ||
| 530 | +` | ||
| 531 | + | ||
| 532 | +type MarkOrgGithubImportRepoFailedParams struct { | ||
| 533 | + ID int64 | ||
| 534 | + LastError pgtype.Text | ||
| 535 | + RepoID pgtype.Int8 | ||
| 536 | +} | ||
| 537 | + | ||
| 538 | +func (q *Queries) MarkOrgGithubImportRepoFailed(ctx context.Context, db DBTX, arg MarkOrgGithubImportRepoFailedParams) error { | ||
| 539 | + _, err := db.Exec(ctx, markOrgGithubImportRepoFailed, arg.ID, arg.LastError, arg.RepoID) | ||
| 540 | + return err | ||
| 541 | +} | ||
| 542 | + | ||
| 543 | +const markOrgGithubImportRepoImported = `-- name: MarkOrgGithubImportRepoImported :exec | ||
| 544 | +UPDATE org_github_import_repos | ||
| 545 | + SET status = 'imported', | ||
| 546 | + repo_id = $2, | ||
| 547 | + last_error = NULL, | ||
| 548 | + completed_at = COALESCE(completed_at, now()), | ||
| 549 | + updated_at = now() | ||
| 550 | + WHERE id = $1 | ||
| 551 | +` | ||
| 552 | + | ||
| 553 | +type MarkOrgGithubImportRepoImportedParams struct { | ||
| 554 | + ID int64 | ||
| 555 | + RepoID pgtype.Int8 | ||
| 556 | +} | ||
| 557 | + | ||
| 558 | +func (q *Queries) MarkOrgGithubImportRepoImported(ctx context.Context, db DBTX, arg MarkOrgGithubImportRepoImportedParams) error { | ||
| 559 | + _, err := db.Exec(ctx, markOrgGithubImportRepoImported, arg.ID, arg.RepoID) | ||
| 560 | + return err | ||
| 561 | +} | ||
| 562 | + | ||
| 563 | +const markOrgGithubImportRepoImporting = `-- name: MarkOrgGithubImportRepoImporting :exec | ||
| 564 | +UPDATE org_github_import_repos | ||
| 565 | + SET status = 'importing', | ||
| 566 | + started_at = COALESCE(started_at, now()), | ||
| 567 | + last_error = NULL, | ||
| 568 | + updated_at = now() | ||
| 569 | + WHERE id = $1 | ||
| 570 | + AND status = 'queued' | ||
| 571 | +` | ||
| 572 | + | ||
| 573 | +func (q *Queries) MarkOrgGithubImportRepoImporting(ctx context.Context, db DBTX, id int64) error { | ||
| 574 | + _, err := db.Exec(ctx, markOrgGithubImportRepoImporting, id) | ||
| 575 | + return err | ||
| 576 | +} | ||
| 577 | + | ||
| 578 | +const markOrgGithubImportRepoSkipped = `-- name: MarkOrgGithubImportRepoSkipped :exec | ||
| 579 | +UPDATE org_github_import_repos | ||
| 580 | + SET status = 'skipped', | ||
| 581 | + last_error = $2, | ||
| 582 | + completed_at = COALESCE(completed_at, now()), | ||
| 583 | + updated_at = now() | ||
| 584 | + WHERE id = $1 | ||
| 585 | +` | ||
| 586 | + | ||
| 587 | +type MarkOrgGithubImportRepoSkippedParams struct { | ||
| 588 | + ID int64 | ||
| 589 | + LastError pgtype.Text | ||
| 590 | +} | ||
| 591 | + | ||
| 592 | +func (q *Queries) MarkOrgGithubImportRepoSkipped(ctx context.Context, db DBTX, arg MarkOrgGithubImportRepoSkippedParams) error { | ||
| 593 | + _, err := db.Exec(ctx, markOrgGithubImportRepoSkipped, arg.ID, arg.LastError) | ||
| 594 | + return err | ||
| 595 | +} | ||
internal/orgs/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/orgs/sqlc/querier.gomodified@@ -33,6 +33,8 @@ type Querier interface { | |||
| 33 | // tx (separate query so the orchestrator owns ordering). | 33 | // tx (separate query so the orchestrator owns ordering). |
| 34 | CreateOrg(ctx context.Context, db DBTX, arg CreateOrgParams) (Org, error) | 34 | CreateOrg(ctx context.Context, db DBTX, arg CreateOrgParams) (Org, error) |
| 35 | // SPDX-License-Identifier: AGPL-3.0-or-later | 35 | // SPDX-License-Identifier: AGPL-3.0-or-later |
| 36 | + CreateOrgGithubImport(ctx context.Context, db DBTX, arg CreateOrgGithubImportParams) (OrgGithubImport, error) | ||
| 37 | + // SPDX-License-Identifier: AGPL-3.0-or-later | ||
| 36 | // ─── org_invitations ─────────────────────────────────────────────── | 38 | // ─── org_invitations ─────────────────────────────────────────────── |
| 37 | CreateOrgInvitation(ctx context.Context, db DBTX, arg CreateOrgInvitationParams) (OrgInvitation, error) | 39 | CreateOrgInvitation(ctx context.Context, db DBTX, arg CreateOrgInvitationParams) (OrgInvitation, error) |
| 38 | // SPDX-License-Identifier: AGPL-3.0-or-later | 40 | // SPDX-License-Identifier: AGPL-3.0-or-later |
@@ -53,6 +55,10 @@ type Querier interface { | |||
| 53 | // column for grace-period restore). | 55 | // column for grace-period restore). |
| 54 | GetOrgBySlug(ctx context.Context, db DBTX, slug string) (Org, error) | 56 | GetOrgBySlug(ctx context.Context, db DBTX, slug string) (Org, error) |
| 55 | GetOrgBySlugIncludingDeleted(ctx context.Context, db DBTX, slug string) (Org, error) | 57 | GetOrgBySlugIncludingDeleted(ctx context.Context, db DBTX, slug string) (Org, error) |
| 58 | + GetOrgGithubImport(ctx context.Context, db DBTX, id int64) (OrgGithubImport, error) | ||
| 59 | + GetOrgGithubImportForOrg(ctx context.Context, db DBTX, arg GetOrgGithubImportForOrgParams) (OrgGithubImport, error) | ||
| 60 | + GetOrgGithubImportProgress(ctx context.Context, db DBTX, arg GetOrgGithubImportProgressParams) (GetOrgGithubImportProgressRow, error) | ||
| 61 | + GetOrgGithubImportRepo(ctx context.Context, db DBTX, id int64) (OrgGithubImportRepo, error) | ||
| 56 | GetOrgInvitationByID(ctx context.Context, db DBTX, id int64) (OrgInvitation, error) | 62 | GetOrgInvitationByID(ctx context.Context, db DBTX, id int64) (OrgInvitation, error) |
| 57 | GetOrgInvitationByTokenHash(ctx context.Context, db DBTX, tokenHash []byte) (OrgInvitation, error) | 63 | GetOrgInvitationByTokenHash(ctx context.Context, db DBTX, tokenHash []byte) (OrgInvitation, error) |
| 58 | GetOrgMember(ctx context.Context, db DBTX, arg GetOrgMemberParams) (OrgMember, error) | 64 | GetOrgMember(ctx context.Context, db DBTX, arg GetOrgMemberParams) (OrgMember, error) |
@@ -64,11 +70,14 @@ type Querier interface { | |||
| 64 | // Final row removal after the cascade finished. The principals | 70 | // Final row removal after the cascade finished. The principals |
| 65 | // trigger drops the matching principals row in the same tx. | 71 | // trigger drops the matching principals row in the same tx. |
| 66 | HardDeleteOrgRow(ctx context.Context, db DBTX, id int64) error | 72 | HardDeleteOrgRow(ctx context.Context, db DBTX, id int64) error |
| 73 | + InsertOrgGithubImportRepo(ctx context.Context, db DBTX, arg InsertOrgGithubImportRepoParams) (OrgGithubImportRepo, error) | ||
| 67 | // Replaces the inline EXISTS query in handlers/orgs/teams.go | 74 | // Replaces the inline EXISTS query in handlers/orgs/teams.go |
| 68 | // canSeeTeam + filterSecretTeams (SR2 M2). Used by the visibility | 75 | // canSeeTeam + filterSecretTeams (SR2 M2). Used by the visibility |
| 69 | // gate for secret teams. | 76 | // gate for secret teams. |
| 70 | IsTeamMember(ctx context.Context, db DBTX, arg IsTeamMemberParams) (bool, error) | 77 | IsTeamMember(ctx context.Context, db DBTX, arg IsTeamMemberParams) (bool, error) |
| 71 | ListChildTeams(ctx context.Context, db DBTX, parentTeamID pgtype.Int8) ([]Team, error) | 78 | ListChildTeams(ctx context.Context, db DBTX, parentTeamID pgtype.Int8) ([]Team, error) |
| 79 | + ListOrgGithubImportRepos(ctx context.Context, db DBTX, importID int64) ([]OrgGithubImportRepo, error) | ||
| 80 | + ListOrgGithubImportsForOrg(ctx context.Context, db DBTX, arg ListOrgGithubImportsForOrgParams) ([]OrgGithubImport, error) | ||
| 72 | // Sweep input for the lifecycle worker: every soft-deleted org whose | 81 | // Sweep input for the lifecycle worker: every soft-deleted org whose |
| 73 | // 14-day grace window has elapsed. The interval is intentionally a | 82 | // 14-day grace window has elapsed. The interval is intentionally a |
| 74 | // DB literal (not a parameter) so the policy lives next to the data. | 83 | // DB literal (not a parameter) so the policy lives next to the data. |
@@ -100,6 +109,15 @@ type Querier interface { | |||
| 100 | // policy aggregator unions this with each row's parent_team_id to | 109 | // policy aggregator unions this with each row's parent_team_id to |
| 101 | // get the inherited set. | 110 | // get the inherited set. |
| 102 | ListTeamsForUserInOrg(ctx context.Context, db DBTX, arg ListTeamsForUserInOrgParams) ([]ListTeamsForUserInOrgRow, error) | 111 | ListTeamsForUserInOrg(ctx context.Context, db DBTX, arg ListTeamsForUserInOrgParams) ([]ListTeamsForUserInOrgRow, error) |
| 112 | + MarkOrgGithubImportCompleted(ctx context.Context, db DBTX, id int64) error | ||
| 113 | + MarkOrgGithubImportCompletedIfDone(ctx context.Context, db DBTX, id int64) (OrgGithubImport, error) | ||
| 114 | + MarkOrgGithubImportDiscovering(ctx context.Context, db DBTX, id int64) error | ||
| 115 | + MarkOrgGithubImportFailed(ctx context.Context, db DBTX, arg MarkOrgGithubImportFailedParams) error | ||
| 116 | + MarkOrgGithubImportImporting(ctx context.Context, db DBTX, arg MarkOrgGithubImportImportingParams) error | ||
| 117 | + MarkOrgGithubImportRepoFailed(ctx context.Context, db DBTX, arg MarkOrgGithubImportRepoFailedParams) error | ||
| 118 | + MarkOrgGithubImportRepoImported(ctx context.Context, db DBTX, arg MarkOrgGithubImportRepoImportedParams) error | ||
| 119 | + MarkOrgGithubImportRepoImporting(ctx context.Context, db DBTX, id int64) error | ||
| 120 | + MarkOrgGithubImportRepoSkipped(ctx context.Context, db DBTX, arg MarkOrgGithubImportRepoSkippedParams) error | ||
| 103 | RemoveOrgMember(ctx context.Context, db DBTX, arg RemoveOrgMemberParams) error | 121 | RemoveOrgMember(ctx context.Context, db DBTX, arg RemoveOrgMemberParams) error |
| 104 | RemoveTeamMember(ctx context.Context, db DBTX, arg RemoveTeamMemberParams) error | 122 | RemoveTeamMember(ctx context.Context, db DBTX, arg RemoveTeamMemberParams) error |
| 105 | // ─── principals (read-only from this domain) ─────────────────────── | 123 | // ─── principals (read-only from this domain) ─────────────────────── |
internal/pulls/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/ratelimit/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/repos/create.gomodified@@ -78,6 +78,11 @@ type Params struct { | |||
| 78 | // accounts, not to throttle staff. | 78 | // accounts, not to throttle staff. |
| 79 | ActorIsSiteAdmin bool | 79 | ActorIsSiteAdmin bool |
| 80 | 80 | ||
| 81 | + // BypassCreateRateLimit lets trusted server-side bulk operations | ||
| 82 | + // create many repos for the same actor without tripping the browser | ||
| 83 | + // anti-abuse throttle. Keep false for direct user submits. | ||
| 84 | + BypassCreateRateLimit bool | ||
| 85 | + | ||
| 81 | Name string // already lowercased + trimmed | 86 | Name string // already lowercased + trimmed |
| 82 | Description string | 87 | Description string |
| 83 | Visibility string // "public" | "private" | 88 | Visibility string // "public" | "private" |
@@ -146,7 +151,7 @@ func Create(ctx context.Context, deps Deps, p Params) (Result, error) { | |||
| 146 | // Rate-limit per actor (NOT per owner) so a user can't bypass the | 151 | // Rate-limit per actor (NOT per owner) so a user can't bypass the |
| 147 | // per-account cap by spreading creates across orgs they manage. | 152 | // per-account cap by spreading creates across orgs they manage. |
| 148 | // Site admins skip the cap entirely. | 153 | // Site admins skip the cap entirely. |
| 149 | - if !p.ActorIsSiteAdmin { | 154 | + if !p.ActorIsSiteAdmin && !p.BypassCreateRateLimit { |
| 150 | if err := deps.Limiter.Hit(ctx, deps.Pool, throttle.Limit{ | 155 | if err := deps.Limiter.Hit(ctx, deps.Pool, throttle.Limit{ |
| 151 | Scope: "repo_create", | 156 | Scope: "repo_create", |
| 152 | Identifier: fmt.Sprintf("user:%d", p.ActorUserID), | 157 | Identifier: fmt.Sprintf("user:%d", p.ActorUserID), |
internal/repos/git/remotes.gomodified@@ -8,6 +8,7 @@ import ( | |||
| 8 | "fmt" | 8 | "fmt" |
| 9 | "os" | 9 | "os" |
| 10 | "os/exec" | 10 | "os/exec" |
| 11 | + "path/filepath" | ||
| 11 | "strings" | 12 | "strings" |
| 12 | ) | 13 | ) |
| 13 | 14 | ||
@@ -16,12 +17,37 @@ import ( | |||
| 16 | // if a local branch or tag has diverged, git rejects the update instead of | 17 | // if a local branch or tag has diverged, git rejects the update instead of |
| 17 | // overwriting local history. | 18 | // overwriting local history. |
| 18 | func FetchRemoteHeadsAndTags(ctx context.Context, gitDir, remoteURL string) error { | 19 | func FetchRemoteHeadsAndTags(ctx context.Context, gitDir, remoteURL string) error { |
| 20 | + return fetchRemoteHeadsAndTags(ctx, gitDir, remoteURL, "") | ||
| 21 | +} | ||
| 22 | + | ||
| 23 | +// FetchRemoteHeadsAndTagsWithToken is the authenticated variant used by | ||
| 24 | +// GitHub imports for private repositories. The token is supplied through a | ||
| 25 | +// short-lived askpass helper, not embedded in the remote URL or git argv. | ||
| 26 | +func FetchRemoteHeadsAndTagsWithToken(ctx context.Context, gitDir, remoteURL, token string) error { | ||
| 27 | + return fetchRemoteHeadsAndTags(ctx, gitDir, remoteURL, strings.TrimSpace(token)) | ||
| 28 | +} | ||
| 29 | + | ||
| 30 | +func fetchRemoteHeadsAndTags(ctx context.Context, gitDir, remoteURL, token string) error { | ||
| 19 | if gitDir == "" { | 31 | if gitDir == "" { |
| 20 | return errors.New("git fetch: gitDir is required") | 32 | return errors.New("git fetch: gitDir is required") |
| 21 | } | 33 | } |
| 22 | if strings.TrimSpace(remoteURL) == "" { | 34 | if strings.TrimSpace(remoteURL) == "" { |
| 23 | return errors.New("git fetch: remoteURL is required") | 35 | return errors.New("git fetch: remoteURL is required") |
| 24 | } | 36 | } |
| 37 | + env := append(os.Environ(), | ||
| 38 | + "GIT_CONFIG_NOSYSTEM=1", | ||
| 39 | + "GIT_CONFIG_GLOBAL=/dev/null", | ||
| 40 | + "GIT_CONFIG_XDG=/dev/null", | ||
| 41 | + "GIT_TERMINAL_PROMPT=0", | ||
| 42 | + ) | ||
| 43 | + if token != "" { | ||
| 44 | + askpass, cleanup, err := writeAskpass(token) | ||
| 45 | + if err != nil { | ||
| 46 | + return err | ||
| 47 | + } | ||
| 48 | + defer cleanup() | ||
| 49 | + env = append(env, "GIT_ASKPASS="+askpass) | ||
| 50 | + } | ||
| 25 | //nolint:gosec // G204: gitDir is RepoFS-derived at call sites; remoteURL is caller-allowlisted and passed as argv, not shell. | 51 | //nolint:gosec // G204: gitDir is RepoFS-derived at call sites; remoteURL is caller-allowlisted and passed as argv, not shell. |
| 26 | cmd := exec.CommandContext(ctx, "git", | 52 | cmd := exec.CommandContext(ctx, "git", |
| 27 | "-c", "protocol.ext.allow=never", | 53 | "-c", "protocol.ext.allow=never", |
@@ -33,14 +59,34 @@ func FetchRemoteHeadsAndTags(ctx context.Context, gitDir, remoteURL string) erro | |||
| 33 | "refs/heads/*:refs/heads/*", | 59 | "refs/heads/*:refs/heads/*", |
| 34 | "refs/tags/*:refs/tags/*", | 60 | "refs/tags/*:refs/tags/*", |
| 35 | ) | 61 | ) |
| 36 | - cmd.Env = append(os.Environ(), | 62 | + cmd.Env = env |
| 37 | - "GIT_CONFIG_NOSYSTEM=1", | ||
| 38 | - "GIT_CONFIG_GLOBAL=/dev/null", | ||
| 39 | - "GIT_CONFIG_XDG=/dev/null", | ||
| 40 | - ) | ||
| 41 | out, err := cmd.CombinedOutput() | 63 | out, err := cmd.CombinedOutput() |
| 42 | if err != nil { | 64 | if err != nil { |
| 43 | return fmt.Errorf("git fetch remote refs: %w (%s)", err, strings.TrimSpace(string(out))) | 65 | return fmt.Errorf("git fetch remote refs: %w (%s)", err, strings.TrimSpace(string(out))) |
| 44 | } | 66 | } |
| 45 | return nil | 67 | return nil |
| 46 | } | 68 | } |
| 69 | + | ||
| 70 | +func writeAskpass(token string) (path string, cleanup func(), err error) { | ||
| 71 | + dir, err := os.MkdirTemp("", "shithub-git-askpass-*") | ||
| 72 | + if err != nil { | ||
| 73 | + return "", func() {}, fmt.Errorf("git fetch: askpass tempdir: %w", err) | ||
| 74 | + } | ||
| 75 | + cleanup = func() { _ = os.RemoveAll(dir) } | ||
| 76 | + path = filepath.Join(dir, "askpass.sh") | ||
| 77 | + body := "#!/bin/sh\n" + | ||
| 78 | + "case \"$1\" in\n" + | ||
| 79 | + "*Username*) printf '%s\\n' 'x-access-token' ;;\n" + | ||
| 80 | + "*Password*) printf '%s\\n' " + shellQuote(token) + " ;;\n" + | ||
| 81 | + "*) printf '\\n' ;;\n" + | ||
| 82 | + "esac\n" | ||
| 83 | + if err := os.WriteFile(path, []byte(body), 0o700); err != nil { | ||
| 84 | + cleanup() | ||
| 85 | + return "", func() {}, fmt.Errorf("git fetch: askpass write: %w", err) | ||
| 86 | + } | ||
| 87 | + return path, cleanup, nil | ||
| 88 | +} | ||
| 89 | + | ||
| 90 | +func shellQuote(s string) string { | ||
| 91 | + return "'" + strings.ReplaceAll(s, "'", "'\"'\"'") + "'" | ||
| 92 | +} | ||
internal/repos/queries/repos.sqlmodified@@ -34,12 +34,13 @@ FROM repos | |||
| 34 | WHERE id = $1; | 34 | WHERE id = $1; |
| 35 | 35 | ||
| 36 | -- name: GetRepoOwnerUsernameByID :one | 36 | -- name: GetRepoOwnerUsernameByID :one |
| 37 | --- Returns the owner_username for a repo. Used by size-recalc and other | 37 | +-- Returns the owner slug for a repo. Used by size-recalc, indexing, and |
| 38 | --- jobs that need to derive the bare-repo on-disk path without round- | 38 | +-- other jobs that need the bare-repo on-disk path. Org-owned repos use the |
| 39 | --- tripping through the full user row. | 39 | +-- org slug in the same path position as user-owned repos. |
| 40 | -SELECT u.username AS owner_username, r.name AS repo_name | 40 | +SELECT COALESCE(u.username::varchar, o.slug::varchar) AS owner_username, r.name AS repo_name |
| 41 | FROM repos r | 41 | FROM repos r |
| 42 | -JOIN users u ON u.id = r.owner_user_id | 42 | +LEFT JOIN users u ON u.id = r.owner_user_id |
| 43 | +LEFT JOIN orgs o ON o.id = r.owner_org_id | ||
| 43 | WHERE r.id = $1; | 44 | WHERE r.id = $1; |
| 44 | 45 | ||
| 45 | -- name: GetRepoByOwnerUserAndName :one | 46 | -- name: GetRepoByOwnerUserAndName :one |
internal/repos/source_remote.gomodified@@ -6,13 +6,23 @@ import ( | |||
| 6 | "context" | 6 | "context" |
| 7 | "errors" | 7 | "errors" |
| 8 | "fmt" | 8 | "fmt" |
| 9 | + "log/slog" | ||
| 9 | "net/url" | 10 | "net/url" |
| 10 | "strings" | 11 | "strings" |
| 12 | + "time" | ||
| 11 | 13 | ||
| 14 | + "github.com/jackc/pgx/v5/pgtype" | ||
| 15 | + "github.com/jackc/pgx/v5/pgxpool" | ||
| 16 | + | ||
| 17 | + "github.com/tenseleyFlow/shithub/internal/infra/storage" | ||
| 18 | + repogit "github.com/tenseleyFlow/shithub/internal/repos/git" | ||
| 19 | + reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc" | ||
| 12 | "github.com/tenseleyFlow/shithub/internal/security/ssrf" | 20 | "github.com/tenseleyFlow/shithub/internal/security/ssrf" |
| 21 | + "github.com/tenseleyFlow/shithub/internal/worker" | ||
| 13 | ) | 22 | ) |
| 14 | 23 | ||
| 15 | const MaxSourceRemoteURLLen = 2048 | 24 | const MaxSourceRemoteURLLen = 2048 |
| 25 | +const SourceRemoteFetchTimeout = 45 * time.Second | ||
| 16 | 26 | ||
| 17 | var ErrInvalidSourceRemote = errors.New("repos: invalid source remote URL") | 27 | var ErrInvalidSourceRemote = errors.New("repos: invalid source remote URL") |
| 18 | 28 | ||
@@ -67,3 +77,142 @@ func ValidateSourceRemoteURL(ctx context.Context, raw string) (string, error) { | |||
| 67 | } | 77 | } |
| 68 | return normalized, nil | 78 | return normalized, nil |
| 69 | } | 79 | } |
| 80 | + | ||
| 81 | +// SourceRemoteDeps wires source-remote fetches. FetchToken is optional and | ||
| 82 | +// only used for private GitHub imports; it is not stored in repo_source_remotes. | ||
| 83 | +type SourceRemoteDeps struct { | ||
| 84 | + Pool *pgxpool.Pool | ||
| 85 | + RepoFS *storage.RepoFS | ||
| 86 | + Logger *slog.Logger | ||
| 87 | + FetchToken string | ||
| 88 | +} | ||
| 89 | + | ||
| 90 | +// SaveSourceRemote validates and persists the credential-free source remote. | ||
| 91 | +func SaveSourceRemote(ctx context.Context, deps SourceRemoteDeps, repoID int64, rawURL string) (string, error) { | ||
| 92 | + remoteURL, err := ValidateSourceRemoteURL(ctx, rawURL) | ||
| 93 | + if err != nil || remoteURL == "" { | ||
| 94 | + return remoteURL, err | ||
| 95 | + } | ||
| 96 | + _, err = reposdb.New().UpsertRepoSourceRemote(ctx, deps.Pool, reposdb.UpsertRepoSourceRemoteParams{ | ||
| 97 | + RepoID: repoID, | ||
| 98 | + RemoteUrl: remoteURL, | ||
| 99 | + }) | ||
| 100 | + return remoteURL, err | ||
| 101 | +} | ||
| 102 | + | ||
| 103 | +// FetchSourceRemote imports public heads/tags from a configured source remote | ||
| 104 | +// and updates cached default-branch/index/size state. | ||
| 105 | +func FetchSourceRemote(ctx context.Context, deps SourceRemoteDeps, row reposdb.Repo, ownerSlug, remoteURL string) error { | ||
| 106 | + remoteURL, err := ValidateSourceRemoteURL(ctx, remoteURL) | ||
| 107 | + if err != nil { | ||
| 108 | + MarkSourceRemoteFetchError(ctx, deps, row.ID, err) | ||
| 109 | + return err | ||
| 110 | + } | ||
| 111 | + gitDir, err := deps.RepoFS.RepoPath(ownerSlug, row.Name) | ||
| 112 | + if err != nil { | ||
| 113 | + MarkSourceRemoteFetchError(ctx, deps, row.ID, err) | ||
| 114 | + return err | ||
| 115 | + } | ||
| 116 | + fetchCtx, cancel := context.WithTimeout(ctx, SourceRemoteFetchTimeout) | ||
| 117 | + defer cancel() | ||
| 118 | + if strings.TrimSpace(deps.FetchToken) != "" { | ||
| 119 | + err = repogit.FetchRemoteHeadsAndTagsWithToken(fetchCtx, gitDir, remoteURL, deps.FetchToken) | ||
| 120 | + } else { | ||
| 121 | + err = repogit.FetchRemoteHeadsAndTags(fetchCtx, gitDir, remoteURL) | ||
| 122 | + } | ||
| 123 | + if err != nil { | ||
| 124 | + MarkSourceRemoteFetchError(ctx, deps, row.ID, err) | ||
| 125 | + return err | ||
| 126 | + } | ||
| 127 | + if err := RefreshFetchedRepoState(ctx, deps, row, gitDir); err != nil { | ||
| 128 | + MarkSourceRemoteFetchError(ctx, deps, row.ID, err) | ||
| 129 | + return err | ||
| 130 | + } | ||
| 131 | + q := reposdb.New() | ||
| 132 | + if err := q.MarkRepoSourceRemoteFetched(ctx, deps.Pool, row.ID); err != nil && deps.Logger != nil { | ||
| 133 | + deps.Logger.WarnContext(ctx, "source-remote: mark fetched", "error", err, "repo_id", row.ID) | ||
| 134 | + } | ||
| 135 | + return nil | ||
| 136 | +} | ||
| 137 | + | ||
| 138 | +// RefreshFetchedRepoState reconciles the repo row after a source fetch. | ||
| 139 | +func RefreshFetchedRepoState(ctx context.Context, deps SourceRemoteDeps, row reposdb.Repo, gitDir string) error { | ||
| 140 | + refs, err := repogit.ListRefs(ctx, gitDir) | ||
| 141 | + if err != nil { | ||
| 142 | + return err | ||
| 143 | + } | ||
| 144 | + branch, oid := ChooseFetchedDefaultBranch(row.DefaultBranch, refs.Branches) | ||
| 145 | + if branch == "" { | ||
| 146 | + return nil | ||
| 147 | + } | ||
| 148 | + q := reposdb.New() | ||
| 149 | + if branch != row.DefaultBranch { | ||
| 150 | + if err := q.UpdateRepoDefaultBranch(ctx, deps.Pool, reposdb.UpdateRepoDefaultBranchParams{ | ||
| 151 | + ID: row.ID, | ||
| 152 | + DefaultBranch: branch, | ||
| 153 | + }); err != nil { | ||
| 154 | + return err | ||
| 155 | + } | ||
| 156 | + if err := repogit.SetSymbolicRef(ctx, gitDir, "HEAD", "refs/heads/"+branch); err != nil && deps.Logger != nil { | ||
| 157 | + deps.Logger.WarnContext(ctx, "source-remote: set symbolic head", "error", err, "repo_id", row.ID, "branch", branch) | ||
| 158 | + } | ||
| 159 | + } | ||
| 160 | + if !row.DefaultBranchOid.Valid || row.DefaultBranchOid.String != oid { | ||
| 161 | + if err := q.UpdateRepoDefaultBranchOID(ctx, deps.Pool, reposdb.UpdateRepoDefaultBranchOIDParams{ | ||
| 162 | + ID: row.ID, | ||
| 163 | + DefaultBranchOid: pgtype.Text{String: oid, Valid: true}, | ||
| 164 | + }); err != nil { | ||
| 165 | + return err | ||
| 166 | + } | ||
| 167 | + if _, err := worker.Enqueue(ctx, deps.Pool, worker.KindRepoIndexCode, map[string]any{"repo_id": row.ID}, worker.EnqueueOptions{}); err != nil && deps.Logger != nil { | ||
| 168 | + deps.Logger.WarnContext(ctx, "source-remote: enqueue index", "error", err, "repo_id", row.ID) | ||
| 169 | + } | ||
| 170 | + } | ||
| 171 | + if _, err := worker.Enqueue(ctx, deps.Pool, worker.KindRepoSizeRecalc, map[string]any{"repo_id": row.ID}, worker.EnqueueOptions{}); err != nil && deps.Logger != nil { | ||
| 172 | + deps.Logger.WarnContext(ctx, "source-remote: enqueue size", "error", err, "repo_id", row.ID) | ||
| 173 | + } | ||
| 174 | + _ = worker.Notify(ctx, deps.Pool) | ||
| 175 | + return nil | ||
| 176 | +} | ||
| 177 | + | ||
| 178 | +// ChooseFetchedDefaultBranch mirrors GitHub import behavior: keep the current | ||
| 179 | +// default if present, otherwise prefer trunk/main/master before falling back to | ||
| 180 | +// the first fetched branch. | ||
| 181 | +func ChooseFetchedDefaultBranch(current string, branches []repogit.RefEntry) (name, oid string) { | ||
| 182 | + if len(branches) == 0 { | ||
| 183 | + return "", "" | ||
| 184 | + } | ||
| 185 | + for _, candidate := range []string{current, "trunk", "main", "master"} { | ||
| 186 | + if candidate == "" { | ||
| 187 | + continue | ||
| 188 | + } | ||
| 189 | + for _, branch := range branches { | ||
| 190 | + if branch.Name == candidate { | ||
| 191 | + return branch.Name, branch.OID | ||
| 192 | + } | ||
| 193 | + } | ||
| 194 | + } | ||
| 195 | + return branches[0].Name, branches[0].OID | ||
| 196 | +} | ||
| 197 | + | ||
| 198 | +// MarkSourceRemoteFetchError stores the latest source-fetch failure without | ||
| 199 | +// leaking credentials; callers pass credential-free remote URLs. | ||
| 200 | +func MarkSourceRemoteFetchError(ctx context.Context, deps SourceRemoteDeps, repoID int64, err error) { | ||
| 201 | + if err == nil { | ||
| 202 | + return | ||
| 203 | + } | ||
| 204 | + msg := strings.TrimSpace(err.Error()) | ||
| 205 | + if len(msg) > 500 { | ||
| 206 | + msg = msg[:500] | ||
| 207 | + } | ||
| 208 | + if markErr := reposdb.New().MarkRepoSourceRemoteFetchError(ctx, deps.Pool, reposdb.MarkRepoSourceRemoteFetchErrorParams{ | ||
| 209 | + RepoID: repoID, | ||
| 210 | + LastError: pgtype.Text{String: msg, Valid: true}, | ||
| 211 | + }); markErr != nil && deps.Logger != nil { | ||
| 212 | + deps.Logger.WarnContext(ctx, "source-remote: mark fetch error", "error", markErr, "cause", err, "repo_id", repoID) | ||
| 213 | + } | ||
| 214 | +} | ||
| 215 | + | ||
| 216 | +func IsInvalidSourceRemote(err error) bool { | ||
| 217 | + return errors.Is(err, ErrInvalidSourceRemote) | ||
| 218 | +} | ||
internal/repos/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/repos/sqlc/querier.gomodified@@ -61,9 +61,9 @@ type Querier interface { | |||
| 61 | // O(1) cost the user-side path enjoys. | 61 | // O(1) cost the user-side path enjoys. |
| 62 | GetRepoByOwnerOrgAndName(ctx context.Context, db DBTX, arg GetRepoByOwnerOrgAndNameParams) (Repo, error) | 62 | GetRepoByOwnerOrgAndName(ctx context.Context, db DBTX, arg GetRepoByOwnerOrgAndNameParams) (Repo, error) |
| 63 | GetRepoByOwnerUserAndName(ctx context.Context, db DBTX, arg GetRepoByOwnerUserAndNameParams) (Repo, error) | 63 | GetRepoByOwnerUserAndName(ctx context.Context, db DBTX, arg GetRepoByOwnerUserAndNameParams) (Repo, error) |
| 64 | - // Returns the owner_username for a repo. Used by size-recalc and other | 64 | + // Returns the owner slug for a repo. Used by size-recalc, indexing, and |
| 65 | - // jobs that need to derive the bare-repo on-disk path without round- | 65 | + // other jobs that need the bare-repo on-disk path. Org-owned repos use the |
| 66 | - // tripping through the full user row. | 66 | + // org slug in the same path position as user-owned repos. |
| 67 | GetRepoOwnerUsernameByID(ctx context.Context, db DBTX, id int64) (GetRepoOwnerUsernameByIDRow, error) | 67 | GetRepoOwnerUsernameByID(ctx context.Context, db DBTX, id int64) (GetRepoOwnerUsernameByIDRow, error) |
| 68 | // SPDX-License-Identifier: AGPL-3.0-or-later | 68 | // SPDX-License-Identifier: AGPL-3.0-or-later |
| 69 | GetRepoSourceRemote(ctx context.Context, db DBTX, repoID int64) (RepoSourceRemote, error) | 69 | GetRepoSourceRemote(ctx context.Context, db DBTX, repoID int64) (RepoSourceRemote, error) |
internal/repos/sqlc/repos.sql.gomodified@@ -427,20 +427,21 @@ func (q *Queries) GetRepoByOwnerUserAndName(ctx context.Context, db DBTX, arg Ge | |||
| 427 | } | 427 | } |
| 428 | 428 | ||
| 429 | const getRepoOwnerUsernameByID = `-- name: GetRepoOwnerUsernameByID :one | 429 | const getRepoOwnerUsernameByID = `-- name: GetRepoOwnerUsernameByID :one |
| 430 | -SELECT u.username AS owner_username, r.name AS repo_name | 430 | +SELECT COALESCE(u.username::varchar, o.slug::varchar) AS owner_username, r.name AS repo_name |
| 431 | FROM repos r | 431 | FROM repos r |
| 432 | -JOIN users u ON u.id = r.owner_user_id | 432 | +LEFT JOIN users u ON u.id = r.owner_user_id |
| 433 | +LEFT JOIN orgs o ON o.id = r.owner_org_id | ||
| 433 | WHERE r.id = $1 | 434 | WHERE r.id = $1 |
| 434 | ` | 435 | ` |
| 435 | 436 | ||
| 436 | type GetRepoOwnerUsernameByIDRow struct { | 437 | type GetRepoOwnerUsernameByIDRow struct { |
| 437 | - OwnerUsername string | 438 | + OwnerUsername interface{} |
| 438 | RepoName string | 439 | RepoName string |
| 439 | } | 440 | } |
| 440 | 441 | ||
| 441 | -// Returns the owner_username for a repo. Used by size-recalc and other | 442 | +// Returns the owner slug for a repo. Used by size-recalc, indexing, and |
| 442 | -// jobs that need to derive the bare-repo on-disk path without round- | 443 | +// other jobs that need the bare-repo on-disk path. Org-owned repos use the |
| 443 | -// tripping through the full user row. | 444 | +// org slug in the same path position as user-owned repos. |
| 444 | func (q *Queries) GetRepoOwnerUsernameByID(ctx context.Context, db DBTX, id int64) (GetRepoOwnerUsernameByIDRow, error) { | 445 | func (q *Queries) GetRepoOwnerUsernameByID(ctx context.Context, db DBTX, id int64) (GetRepoOwnerUsernameByIDRow, error) { |
| 445 | row := db.QueryRow(ctx, getRepoOwnerUsernameByID, id) | 446 | row := db.QueryRow(ctx, getRepoOwnerUsernameByID, id) |
| 446 | var i GetRepoOwnerUsernameByIDRow | 447 | var i GetRepoOwnerUsernameByIDRow |
internal/users/sqlc/models.gomodified@@ -1870,6 +1870,47 @@ type Org struct { | |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/web/handlers/repo/source_remote.go modified @@ -4,130 +4,41 @@ package repo | |||
| 4 | 4 | ||
| 5 | import ( | 5 | import ( |
| 6 | "context" | 6 | "context" |
| 7 | - "errors" | ||
| 8 | - "strings" | ||
| 9 | - "time" | ||
| 10 | - | ||
| 11 | - "github.com/jackc/pgx/v5/pgtype" | ||
| 12 | 7 | ||
| 13 | "github.com/tenseleyFlow/shithub/internal/repos" | 8 | "github.com/tenseleyFlow/shithub/internal/repos" |
| 14 | repogit "github.com/tenseleyFlow/shithub/internal/repos/git" | 9 | repogit "github.com/tenseleyFlow/shithub/internal/repos/git" |
| 15 | reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc" | 10 | reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc" |
| 16 | - "github.com/tenseleyFlow/shithub/internal/worker" | ||
| 17 | ) | 11 | ) |
| 18 | 12 | ||
| 19 | -const sourceRemoteFetchTimeout = 45 * time.Second | ||
| 20 | - | ||
| 21 | func (h *Handlers) saveRepoSourceRemote(ctx context.Context, repoID int64, rawURL string) (string, error) { | 13 | func (h *Handlers) saveRepoSourceRemote(ctx context.Context, repoID int64, rawURL string) (string, error) { |
| 22 | - remoteURL, err := repos.ValidateSourceRemoteURL(ctx, rawURL) | 14 | + return repos.SaveSourceRemote(ctx, h.sourceRemoteDeps(""), repoID, rawURL) |
| 23 | - if err != nil || remoteURL == "" { | ||
| 24 | - return remoteURL, err | ||
| 25 | - } | ||
| 26 | - _, err = h.rq.UpsertRepoSourceRemote(ctx, h.d.Pool, reposdb.UpsertRepoSourceRemoteParams{ | ||
| 27 | - RepoID: repoID, | ||
| 28 | - RemoteUrl: remoteURL, | ||
| 29 | - }) | ||
| 30 | - return remoteURL, err | ||
| 31 | } | 15 | } |
| 32 | 16 | ||
| 33 | func (h *Handlers) fetchRepoSourceRemote(ctx context.Context, row reposdb.Repo, ownerSlug, remoteURL string) error { | 17 | func (h *Handlers) fetchRepoSourceRemote(ctx context.Context, row reposdb.Repo, ownerSlug, remoteURL string) error { |
| 34 | - remoteURL, err := repos.ValidateSourceRemoteURL(ctx, remoteURL) | 18 | + return repos.FetchSourceRemote(ctx, h.sourceRemoteDeps(""), row, ownerSlug, remoteURL) |
| 35 | - if err != nil { | ||
| 36 | - h.markRepoSourceRemoteFetchError(ctx, row.ID, err) | ||
| 37 | - return err | ||
| 38 | - } | ||
| 39 | - gitDir, err := h.d.RepoFS.RepoPath(ownerSlug, row.Name) | ||
| 40 | - if err != nil { | ||
| 41 | - h.markRepoSourceRemoteFetchError(ctx, row.ID, err) | ||
| 42 | - return err | ||
| 43 | - } | ||
| 44 | - fetchCtx, cancel := context.WithTimeout(ctx, sourceRemoteFetchTimeout) | ||
| 45 | - defer cancel() | ||
| 46 | - if err := repogit.FetchRemoteHeadsAndTags(fetchCtx, gitDir, remoteURL); err != nil { | ||
| 47 | - h.markRepoSourceRemoteFetchError(ctx, row.ID, err) | ||
| 48 | - return err | ||
| 49 | - } | ||
| 50 | - if err := h.refreshFetchedRepoState(ctx, row, gitDir); err != nil { | ||
| 51 | - h.markRepoSourceRemoteFetchError(ctx, row.ID, err) | ||
| 52 | - return err | ||
| 53 | - } | ||
| 54 | - if err := h.rq.MarkRepoSourceRemoteFetched(ctx, h.d.Pool, row.ID); err != nil && h.d.Logger != nil { | ||
| 55 | - h.d.Logger.WarnContext(ctx, "source-remote: mark fetched", "error", err, "repo_id", row.ID) | ||
| 56 | - } | ||
| 57 | - return nil | ||
| 58 | } | 19 | } |
| 59 | 20 | ||
| 60 | func (h *Handlers) refreshFetchedRepoState(ctx context.Context, row reposdb.Repo, gitDir string) error { | 21 | func (h *Handlers) refreshFetchedRepoState(ctx context.Context, row reposdb.Repo, gitDir string) error { |
| 61 | - refs, err := repogit.ListRefs(ctx, gitDir) | 22 | + return repos.RefreshFetchedRepoState(ctx, h.sourceRemoteDeps(""), row, gitDir) |
| 62 | - if err != nil { | ||
| 63 | - return err | ||
| 64 | - } | ||
| 65 | - branch, oid := chooseFetchedDefaultBranch(row.DefaultBranch, refs.Branches) | ||
| 66 | - if branch == "" { | ||
| 67 | - return nil | ||
| 68 | - } | ||
| 69 | - if branch != row.DefaultBranch { | ||
| 70 | - if err := h.rq.UpdateRepoDefaultBranch(ctx, h.d.Pool, reposdb.UpdateRepoDefaultBranchParams{ | ||
| 71 | - ID: row.ID, | ||
| 72 | - DefaultBranch: branch, | ||
| 73 | - }); err != nil { | ||
| 74 | - return err | ||
| 75 | - } | ||
| 76 | - if err := repogit.SetSymbolicRef(ctx, gitDir, "HEAD", "refs/heads/"+branch); err != nil && h.d.Logger != nil { | ||
| 77 | - h.d.Logger.WarnContext(ctx, "source-remote: set symbolic head", "error", err, "repo_id", row.ID, "branch", branch) | ||
| 78 | - } | ||
| 79 | - } | ||
| 80 | - if !row.DefaultBranchOid.Valid || row.DefaultBranchOid.String != oid { | ||
| 81 | - if err := h.rq.UpdateRepoDefaultBranchOID(ctx, h.d.Pool, reposdb.UpdateRepoDefaultBranchOIDParams{ | ||
| 82 | - ID: row.ID, | ||
| 83 | - DefaultBranchOid: pgtype.Text{String: oid, Valid: true}, | ||
| 84 | - }); err != nil { | ||
| 85 | - return err | ||
| 86 | - } | ||
| 87 | - if _, err := worker.Enqueue(ctx, h.d.Pool, worker.KindRepoIndexCode, map[string]any{"repo_id": row.ID}, worker.EnqueueOptions{}); err != nil && h.d.Logger != nil { | ||
| 88 | - h.d.Logger.WarnContext(ctx, "source-remote: enqueue index", "error", err, "repo_id", row.ID) | ||
| 89 | - } | ||
| 90 | - } | ||
| 91 | - if _, err := worker.Enqueue(ctx, h.d.Pool, worker.KindRepoSizeRecalc, map[string]any{"repo_id": row.ID}, worker.EnqueueOptions{}); err != nil && h.d.Logger != nil { | ||
| 92 | - h.d.Logger.WarnContext(ctx, "source-remote: enqueue size", "error", err, "repo_id", row.ID) | ||
| 93 | - } | ||
| 94 | - _ = worker.Notify(ctx, h.d.Pool) | ||
| 95 | - return nil | ||
| 96 | } | 23 | } |
| 97 | 24 | ||
| 98 | func chooseFetchedDefaultBranch(current string, branches []repogit.RefEntry) (name, oid string) { | 25 | func chooseFetchedDefaultBranch(current string, branches []repogit.RefEntry) (name, oid string) { |
| 99 | - if len(branches) == 0 { | 26 | + return repos.ChooseFetchedDefaultBranch(current, branches) |
| 100 | - return "", "" | ||
| 101 | - } | ||
| 102 | - for _, candidate := range []string{current, "trunk", "main", "master"} { | ||
| 103 | - if candidate == "" { | ||
| 104 | - continue | ||
| 105 | - } | ||
| 106 | - for _, branch := range branches { | ||
| 107 | - if branch.Name == candidate { | ||
| 108 | - return branch.Name, branch.OID | ||
| 109 | - } | ||
| 110 | - } | ||
| 111 | - } | ||
| 112 | - return branches[0].Name, branches[0].OID | ||
| 113 | } | 27 | } |
| 114 | 28 | ||
| 115 | func (h *Handlers) markRepoSourceRemoteFetchError(ctx context.Context, repoID int64, err error) { | 29 | func (h *Handlers) markRepoSourceRemoteFetchError(ctx context.Context, repoID int64, err error) { |
| 116 | - if err == nil { | 30 | + repos.MarkSourceRemoteFetchError(ctx, h.sourceRemoteDeps(""), repoID, err) |
| 117 | - return | ||
| 118 | - } | ||
| 119 | - msg := strings.TrimSpace(err.Error()) | ||
| 120 | - if len(msg) > 500 { | ||
| 121 | - msg = msg[:500] | ||
| 122 | - } | ||
| 123 | - if markErr := h.rq.MarkRepoSourceRemoteFetchError(ctx, h.d.Pool, reposdb.MarkRepoSourceRemoteFetchErrorParams{ | ||
| 124 | - RepoID: repoID, | ||
| 125 | - LastError: pgtype.Text{String: msg, Valid: true}, | ||
| 126 | - }); markErr != nil && h.d.Logger != nil { | ||
| 127 | - h.d.Logger.WarnContext(ctx, "source-remote: mark fetch error", "error", markErr, "cause", err, "repo_id", repoID) | ||
| 128 | - } | ||
| 129 | } | 31 | } |
| 130 | 32 | ||
| 131 | func isInvalidSourceRemote(err error) bool { | 33 | func isInvalidSourceRemote(err error) bool { |
| 132 | - return errors.Is(err, repos.ErrInvalidSourceRemote) | 34 | + return repos.IsInvalidSourceRemote(err) |
| 35 | +} | ||
| 36 | + | ||
| 37 | +func (h *Handlers) sourceRemoteDeps(token string) repos.SourceRemoteDeps { | ||
| 38 | + return repos.SourceRemoteDeps{ | ||
| 39 | + Pool: h.d.Pool, | ||
| 40 | + RepoFS: h.d.RepoFS, | ||
| 41 | + Logger: h.d.Logger, | ||
| 42 | + FetchToken: token, | ||
| 43 | + } | ||
| 133 | } | 44 | } |
internal/webhook/sqlc/models.go modified @@ -1870,6 +1870,47 @@ type Org struct { |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/worker/jobs/org_github_import.go added @@ -0,0 +1,347 @@ | |||
| 1 | +// SPDX-License-Identifier: AGPL-3.0-or-later | ||
| 2 | + | ||
| 3 | +package jobs | ||
| 4 | + | ||
| 5 | +import ( | ||
| 6 | + "context" | ||
| 7 | + "encoding/json" | ||
| 8 | + "errors" | ||
| 9 | + "fmt" | ||
| 10 | + "log/slog" | ||
| 11 | + "strings" | ||
| 12 | + | ||
| 13 | + "github.com/jackc/pgx/v5" | ||
| 14 | + "github.com/jackc/pgx/v5/pgtype" | ||
| 15 | + "github.com/jackc/pgx/v5/pgxpool" | ||
| 16 | + | ||
| 17 | + "github.com/tenseleyFlow/shithub/internal/auth/audit" | ||
| 18 | + "github.com/tenseleyFlow/shithub/internal/auth/secretbox" | ||
| 19 | + "github.com/tenseleyFlow/shithub/internal/auth/throttle" | ||
| 20 | + "github.com/tenseleyFlow/shithub/internal/infra/storage" | ||
| 21 | + "github.com/tenseleyFlow/shithub/internal/orgs" | ||
| 22 | + orgsdb "github.com/tenseleyFlow/shithub/internal/orgs/sqlc" | ||
| 23 | + "github.com/tenseleyFlow/shithub/internal/repos" | ||
| 24 | + reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc" | ||
| 25 | + "github.com/tenseleyFlow/shithub/internal/worker" | ||
| 26 | +) | ||
| 27 | + | ||
| 28 | +type OrgGitHubImportDeps struct { | ||
| 29 | + Pool *pgxpool.Pool | ||
| 30 | + RepoFS *storage.RepoFS | ||
| 31 | + Box *secretbox.Box | ||
| 32 | + Audit *audit.Recorder | ||
| 33 | + Limiter *throttle.Limiter | ||
| 34 | + Logger *slog.Logger | ||
| 35 | + ShithubdPath string | ||
| 36 | + GitHubClient orgs.GitHubClient | ||
| 37 | +} | ||
| 38 | + | ||
| 39 | +type OrgGitHubImportDiscoverPayload struct { | ||
| 40 | + ImportID int64 `json:"import_id"` | ||
| 41 | +} | ||
| 42 | + | ||
| 43 | +type OrgGitHubImportRepoPayload struct { | ||
| 44 | + ImportRepoID int64 `json:"import_repo_id"` | ||
| 45 | +} | ||
| 46 | + | ||
| 47 | +func OrgGitHubImportDiscover(deps OrgGitHubImportDeps) worker.Handler { | ||
| 48 | + return func(ctx context.Context, raw json.RawMessage) error { | ||
| 49 | + var p OrgGitHubImportDiscoverPayload | ||
| 50 | + if err := json.Unmarshal(raw, &p); err != nil { | ||
| 51 | + return worker.PoisonError(fmt.Errorf("bad payload: %w", err)) | ||
| 52 | + } | ||
| 53 | + if p.ImportID == 0 { | ||
| 54 | + return worker.PoisonError(errors.New("missing import_id")) | ||
| 55 | + } | ||
| 56 | + | ||
| 57 | + q := orgsdb.New() | ||
| 58 | + imp, err := q.GetOrgGithubImport(ctx, deps.Pool, p.ImportID) | ||
| 59 | + if err != nil { | ||
| 60 | + if errors.Is(err, pgx.ErrNoRows) { | ||
| 61 | + return worker.PoisonError(fmt.Errorf("import %d not found", p.ImportID)) | ||
| 62 | + } | ||
| 63 | + return err | ||
| 64 | + } | ||
| 65 | + if orgs.IsTerminalImportStatus(imp.Status) { | ||
| 66 | + return nil | ||
| 67 | + } | ||
| 68 | + token, err := orgs.DecryptGitHubImportToken(imp, deps.Box) | ||
| 69 | + if err != nil { | ||
| 70 | + _ = markImportFailed(ctx, deps, imp.ID, err) | ||
| 71 | + return worker.PoisonError(err) | ||
| 72 | + } | ||
| 73 | + if err := q.MarkOrgGithubImportDiscovering(ctx, deps.Pool, imp.ID); err != nil { | ||
| 74 | + return err | ||
| 75 | + } | ||
| 76 | + ghRepos, err := deps.GitHubClient.ListOrgRepos(ctx, imp.SourceOrg, token) | ||
| 77 | + if err != nil { | ||
| 78 | + _ = markImportFailed(ctx, deps, imp.ID, err) | ||
| 79 | + return nil | ||
| 80 | + } | ||
| 81 | + | ||
| 82 | + tx, err := deps.Pool.Begin(ctx) | ||
| 83 | + if err != nil { | ||
| 84 | + return err | ||
| 85 | + } | ||
| 86 | + committed := false | ||
| 87 | + defer func() { | ||
| 88 | + if !committed { | ||
| 89 | + _ = tx.Rollback(ctx) | ||
| 90 | + } | ||
| 91 | + }() | ||
| 92 | + | ||
| 93 | + for _, gh := range ghRepos { | ||
| 94 | + targetName := repos.NormalizeName(gh.Name) | ||
| 95 | + visibility := orgsdb.RepoVisibilityPublic | ||
| 96 | + if gh.Private { | ||
| 97 | + visibility = orgsdb.RepoVisibilityPrivate | ||
| 98 | + } | ||
| 99 | + row, err := q.InsertOrgGithubImportRepo(ctx, tx, orgsdb.InsertOrgGithubImportRepoParams{ | ||
| 100 | + ImportID: imp.ID, | ||
| 101 | + GithubID: pgtype.Int8{Int64: gh.ID, Valid: gh.ID != 0}, | ||
| 102 | + SourceFullName: fallbackFullName(imp.SourceOrg, gh), | ||
| 103 | + SourceName: strings.TrimSpace(gh.Name), | ||
| 104 | + TargetName: targetName, | ||
| 105 | + CloneUrl: strings.TrimSpace(gh.CloneURL), | ||
| 106 | + Description: truncateRunes(gh.Description, repos.MaxDescriptionLen), | ||
| 107 | + DefaultBranch: strings.TrimSpace(gh.DefaultBranch), | ||
| 108 | + TargetVisibility: visibility, | ||
| 109 | + IsPrivate: gh.Private, | ||
| 110 | + IsFork: gh.Fork, | ||
| 111 | + }) | ||
| 112 | + if err != nil { | ||
| 113 | + return err | ||
| 114 | + } | ||
| 115 | + if _, err := worker.Enqueue(ctx, tx, worker.KindOrgGitHubImportRepo, OrgGitHubImportRepoPayload{ | ||
| 116 | + ImportRepoID: row.ID, | ||
| 117 | + }, worker.EnqueueOptions{}); err != nil { | ||
| 118 | + return err | ||
| 119 | + } | ||
| 120 | + } | ||
| 121 | + if len(ghRepos) == 0 { | ||
| 122 | + if err := q.MarkOrgGithubImportCompleted(ctx, tx, imp.ID); err != nil { | ||
| 123 | + return err | ||
| 124 | + } | ||
| 125 | + } else if err := q.MarkOrgGithubImportImporting(ctx, tx, orgsdb.MarkOrgGithubImportImportingParams{ | ||
| 126 | + ID: imp.ID, | ||
| 127 | + TotalCount: int32(len(ghRepos)), | ||
| 128 | + }); err != nil { | ||
| 129 | + return err | ||
| 130 | + } | ||
| 131 | + if err := worker.Notify(ctx, tx); err != nil && deps.Logger != nil { | ||
| 132 | + deps.Logger.WarnContext(ctx, "github import: notify children", "error", err, "import_id", imp.ID) | ||
| 133 | + } | ||
| 134 | + if err := tx.Commit(ctx); err != nil { | ||
| 135 | + return err | ||
| 136 | + } | ||
| 137 | + committed = true | ||
| 138 | + return nil | ||
| 139 | + } | ||
| 140 | +} | ||
| 141 | + | ||
| 142 | +func OrgGitHubImportRepo(deps OrgGitHubImportDeps) worker.Handler { | ||
| 143 | + return func(ctx context.Context, raw json.RawMessage) error { | ||
| 144 | + var p OrgGitHubImportRepoPayload | ||
| 145 | + if err := json.Unmarshal(raw, &p); err != nil { | ||
| 146 | + return worker.PoisonError(fmt.Errorf("bad payload: %w", err)) | ||
| 147 | + } | ||
| 148 | + if p.ImportRepoID == 0 { | ||
| 149 | + return worker.PoisonError(errors.New("missing import_repo_id")) | ||
| 150 | + } | ||
| 151 | + | ||
| 152 | + q := orgsdb.New() | ||
| 153 | + rq := reposdb.New() | ||
| 154 | + item, err := q.GetOrgGithubImportRepo(ctx, deps.Pool, p.ImportRepoID) | ||
| 155 | + if err != nil { | ||
| 156 | + if errors.Is(err, pgx.ErrNoRows) { | ||
| 157 | + return worker.PoisonError(fmt.Errorf("import repo %d not found", p.ImportRepoID)) | ||
| 158 | + } | ||
| 159 | + return err | ||
| 160 | + } | ||
| 161 | + if orgs.IsTerminalImportRepoStatus(item.Status) { | ||
| 162 | + return nil | ||
| 163 | + } | ||
| 164 | + imp, err := q.GetOrgGithubImport(ctx, deps.Pool, item.ImportID) | ||
| 165 | + if err != nil { | ||
| 166 | + return err | ||
| 167 | + } | ||
| 168 | + if orgs.IsTerminalImportStatus(imp.Status) { | ||
| 169 | + return nil | ||
| 170 | + } | ||
| 171 | + org, err := q.GetOrgByID(ctx, deps.Pool, imp.OrgID) | ||
| 172 | + if err != nil { | ||
| 173 | + return err | ||
| 174 | + } | ||
| 175 | + if org.DeletedAt.Valid { | ||
| 176 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, 0, "Organization was deleted during import."); err != nil { | ||
| 177 | + return err | ||
| 178 | + } | ||
| 179 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 180 | + } | ||
| 181 | + if err := q.MarkOrgGithubImportRepoImporting(ctx, deps.Pool, item.ID); err != nil { | ||
| 182 | + return err | ||
| 183 | + } | ||
| 184 | + | ||
| 185 | + if err := repos.ValidateName(item.TargetName); err != nil { | ||
| 186 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, 0, friendlyRepoImportError(err)); err != nil { | ||
| 187 | + return err | ||
| 188 | + } | ||
| 189 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 190 | + } | ||
| 191 | + exists, err := rq.ExistsRepoForOwnerOrg(ctx, deps.Pool, reposdb.ExistsRepoForOwnerOrgParams{ | ||
| 192 | + OwnerOrgID: pgtype.Int8{Int64: org.ID, Valid: true}, | ||
| 193 | + Name: item.TargetName, | ||
| 194 | + }) | ||
| 195 | + if err != nil { | ||
| 196 | + return err | ||
| 197 | + } | ||
| 198 | + if exists { | ||
| 199 | + if err := q.MarkOrgGithubImportRepoSkipped(ctx, deps.Pool, orgsdb.MarkOrgGithubImportRepoSkippedParams{ | ||
| 200 | + ID: item.ID, | ||
| 201 | + LastError: pgtype.Text{String: "Repository already exists in this organization.", Valid: true}, | ||
| 202 | + }); err != nil { | ||
| 203 | + return err | ||
| 204 | + } | ||
| 205 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 206 | + } | ||
| 207 | + | ||
| 208 | + token, err := orgs.DecryptGitHubImportToken(imp, deps.Box) | ||
| 209 | + if err != nil { | ||
| 210 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, 0, err.Error()); err != nil { | ||
| 211 | + return err | ||
| 212 | + } | ||
| 213 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 214 | + } | ||
| 215 | + if item.IsPrivate && token == "" { | ||
| 216 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, 0, "GitHub token unavailable for private repository."); err != nil { | ||
| 217 | + return err | ||
| 218 | + } | ||
| 219 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 220 | + } | ||
| 221 | + | ||
| 222 | + result, err := repos.Create(ctx, repos.Deps{ | ||
| 223 | + Pool: deps.Pool, | ||
| 224 | + RepoFS: deps.RepoFS, | ||
| 225 | + Audit: deps.Audit, | ||
| 226 | + Limiter: deps.Limiter, | ||
| 227 | + Logger: deps.Logger, | ||
| 228 | + ShithubdPath: deps.ShithubdPath, | ||
| 229 | + }, repos.Params{ | ||
| 230 | + OwnerOrgID: org.ID, | ||
| 231 | + OwnerSlug: string(org.Slug), | ||
| 232 | + ActorUserID: int64Value(imp.RequestedByUserID), | ||
| 233 | + BypassCreateRateLimit: true, | ||
| 234 | + Name: item.TargetName, | ||
| 235 | + Description: item.Description, | ||
| 236 | + Visibility: string(item.TargetVisibility), | ||
| 237 | + }) | ||
| 238 | + if err != nil { | ||
| 239 | + if errors.Is(err, repos.ErrTaken) { | ||
| 240 | + if err := q.MarkOrgGithubImportRepoSkipped(ctx, deps.Pool, orgsdb.MarkOrgGithubImportRepoSkippedParams{ | ||
| 241 | + ID: item.ID, | ||
| 242 | + LastError: pgtype.Text{String: "Repository already exists in this organization.", Valid: true}, | ||
| 243 | + }); err != nil { | ||
| 244 | + return err | ||
| 245 | + } | ||
| 246 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 247 | + } | ||
| 248 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, 0, friendlyRepoImportError(err)); err != nil { | ||
| 249 | + return err | ||
| 250 | + } | ||
| 251 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 252 | + } | ||
| 253 | + | ||
| 254 | + remoteURL, err := repos.SaveSourceRemote(ctx, sourceRemoteDeps(deps, token), result.Repo.ID, item.CloneUrl) | ||
| 255 | + if err != nil { | ||
| 256 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, result.Repo.ID, friendlyRepoImportError(err)); err != nil { | ||
| 257 | + return err | ||
| 258 | + } | ||
| 259 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 260 | + } | ||
| 261 | + if err := repos.FetchSourceRemote(ctx, sourceRemoteDeps(deps, token), result.Repo, string(org.Slug), remoteURL); err != nil { | ||
| 262 | + if err := markImportRepoFailed(ctx, q, deps.Pool, item.ID, result.Repo.ID, friendlyRepoImportError(err)); err != nil { | ||
| 263 | + return err | ||
| 264 | + } | ||
| 265 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 266 | + } | ||
| 267 | + if err := q.MarkOrgGithubImportRepoImported(ctx, deps.Pool, orgsdb.MarkOrgGithubImportRepoImportedParams{ | ||
| 268 | + ID: item.ID, | ||
| 269 | + RepoID: pgtype.Int8{Int64: result.Repo.ID, Valid: true}, | ||
| 270 | + }); err != nil { | ||
| 271 | + return err | ||
| 272 | + } | ||
| 273 | + return completeImportIfDone(ctx, deps, imp.ID) | ||
| 274 | + } | ||
| 275 | +} | ||
| 276 | + | ||
| 277 | +func sourceRemoteDeps(deps OrgGitHubImportDeps, token string) repos.SourceRemoteDeps { | ||
| 278 | + return repos.SourceRemoteDeps{ | ||
| 279 | + Pool: deps.Pool, | ||
| 280 | + RepoFS: deps.RepoFS, | ||
| 281 | + Logger: deps.Logger, | ||
| 282 | + FetchToken: token, | ||
| 283 | + } | ||
| 284 | +} | ||
| 285 | + | ||
| 286 | +func markImportFailed(ctx context.Context, deps OrgGitHubImportDeps, importID int64, err error) error { | ||
| 287 | + msg := friendlyRepoImportError(err) | ||
| 288 | + return orgsdb.New().MarkOrgGithubImportFailed(ctx, deps.Pool, orgsdb.MarkOrgGithubImportFailedParams{ | ||
| 289 | + ID: importID, | ||
| 290 | + LastError: pgtype.Text{String: msg, Valid: true}, | ||
| 291 | + }) | ||
| 292 | +} | ||
| 293 | + | ||
| 294 | +func markImportRepoFailed(ctx context.Context, q *orgsdb.Queries, db orgsdb.DBTX, itemID, repoID int64, msg string) error { | ||
| 295 | + if strings.TrimSpace(msg) == "" { | ||
| 296 | + msg = "Import failed." | ||
| 297 | + } | ||
| 298 | + return q.MarkOrgGithubImportRepoFailed(ctx, db, orgsdb.MarkOrgGithubImportRepoFailedParams{ | ||
| 299 | + ID: itemID, | ||
| 300 | + LastError: pgtype.Text{String: truncateRunes(msg, 500), Valid: true}, | ||
| 301 | + RepoID: pgtype.Int8{Int64: repoID, Valid: repoID != 0}, | ||
| 302 | + }) | ||
| 303 | +} | ||
| 304 | + | ||
| 305 | +func completeImportIfDone(ctx context.Context, deps OrgGitHubImportDeps, importID int64) error { | ||
| 306 | + _, err := orgsdb.New().MarkOrgGithubImportCompletedIfDone(ctx, deps.Pool, importID) | ||
| 307 | + if errors.Is(err, pgx.ErrNoRows) { | ||
| 308 | + return nil | ||
| 309 | + } | ||
| 310 | + return err | ||
| 311 | +} | ||
| 312 | + | ||
| 313 | +func fallbackFullName(sourceOrg string, repo orgs.GitHubRepo) string { | ||
| 314 | + if strings.TrimSpace(repo.FullName) != "" { | ||
| 315 | + return strings.TrimSpace(repo.FullName) | ||
| 316 | + } | ||
| 317 | + return sourceOrg + "/" + strings.TrimSpace(repo.Name) | ||
| 318 | +} | ||
| 319 | + | ||
| 320 | +func truncateRunes(s string, max int) string { | ||
| 321 | + if max <= 0 { | ||
| 322 | + return "" | ||
| 323 | + } | ||
| 324 | + runes := []rune(strings.TrimSpace(s)) | ||
| 325 | + if len(runes) <= max { | ||
| 326 | + return string(runes) | ||
| 327 | + } | ||
| 328 | + return string(runes[:max]) | ||
| 329 | +} | ||
| 330 | + | ||
| 331 | +func friendlyRepoImportError(err error) string { | ||
| 332 | + if err == nil { | ||
| 333 | + return "" | ||
| 334 | + } | ||
| 335 | + msg := strings.TrimSpace(err.Error()) | ||
| 336 | + if msg == "" { | ||
| 337 | + return "Import failed." | ||
| 338 | + } | ||
| 339 | + return truncateRunes(msg, 500) | ||
| 340 | +} | ||
| 341 | + | ||
| 342 | +func int64Value(v pgtype.Int8) int64 { | ||
| 343 | + if !v.Valid { | ||
| 344 | + return 0 | ||
| 345 | + } | ||
| 346 | + return v.Int64 | ||
| 347 | +} | ||
internal/worker/jobs/repo_index_code.go modified @@ -18,7 +18,6 @@ import ( | |||
| 18 | "github.com/tenseleyFlow/shithub/internal/infra/storage" | 18 | "github.com/tenseleyFlow/shithub/internal/infra/storage" |
| 19 | repogit "github.com/tenseleyFlow/shithub/internal/repos/git" | 19 | repogit "github.com/tenseleyFlow/shithub/internal/repos/git" |
| 20 | reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc" | 20 | reposdb "github.com/tenseleyFlow/shithub/internal/repos/sqlc" |
| 21 | - usersdb "github.com/tenseleyFlow/shithub/internal/users/sqlc" | ||
| 22 | "github.com/tenseleyFlow/shithub/internal/worker" | 21 | "github.com/tenseleyFlow/shithub/internal/worker" |
| 23 | ) | 22 | ) |
| 24 | 23 | ||
@@ -62,7 +61,6 @@ func RepoIndexCode(deps IndexCodeDeps) worker.Handler { | |||
| 62 | } | 61 | } |
| 63 | 62 | ||
| 64 | rq := reposdb.New() | 63 | rq := reposdb.New() |
| 65 | - uq := usersdb.New() | ||
| 66 | repo, err := rq.GetRepoByID(ctx, deps.Pool, p.RepoID) | 64 | repo, err := rq.GetRepoByID(ctx, deps.Pool, p.RepoID) |
| 67 | if err != nil { | 65 | if err != nil { |
| 68 | if errors.Is(err, pgx.ErrNoRows) { | 66 | if errors.Is(err, pgx.ErrNoRows) { |
@@ -74,14 +72,15 @@ func RepoIndexCode(deps IndexCodeDeps) worker.Handler { | |||
| 74 | // Repo went away between enqueue and now. Nothing to do. | 72 | // Repo went away between enqueue and now. Nothing to do. |
| 75 | return nil | 73 | return nil |
| 76 | } | 74 | } |
| 77 | - if !repo.OwnerUserID.Valid { | 75 | + owner, err := rq.GetRepoOwnerUsernameByID(ctx, deps.Pool, repo.ID) |
| 78 | - return worker.PoisonError(fmt.Errorf("repo %d has no user owner (org-owned arrives in S31)", repo.ID)) | 76 | + if err != nil { |
| 77 | + return err | ||
| 79 | } | 78 | } |
| 80 | - owner, err := uq.GetUserByID(ctx, deps.Pool, repo.OwnerUserID.Int64) | 79 | + ownerSlug, err := ownerSlugString(owner.OwnerUsername) |
| 81 | if err != nil { | 80 | if err != nil { |
| 82 | return err | 81 | return err |
| 83 | } | 82 | } |
| 84 | - gitDir, err := deps.RepoFS.RepoPath(owner.Username, repo.Name) | 83 | + gitDir, err := deps.RepoFS.RepoPath(ownerSlug, repo.Name) |
| 85 | if err != nil { | 84 | if err != nil { |
| 86 | return err | 85 | return err |
| 87 | } | 86 | } |
internal/worker/jobs/repo_owner_slug.go added @@ -0,0 +1,16 @@ | |||
| 1 | +// SPDX-License-Identifier: AGPL-3.0-or-later | ||
| 2 | + | ||
| 3 | +package jobs | ||
| 4 | + | ||
| 5 | +import "fmt" | ||
| 6 | + | ||
| 7 | +func ownerSlugString(v any) (string, error) { | ||
| 8 | + switch s := v.(type) { | ||
| 9 | + case string: | ||
| 10 | + return s, nil | ||
| 11 | + case []byte: | ||
| 12 | + return string(s), nil | ||
| 13 | + default: | ||
| 14 | + return "", fmt.Errorf("unexpected owner slug type %T", v) | ||
| 15 | + } | ||
| 16 | +} | ||
internal/worker/jobs/repo_size_recalc.go modified @@ -58,7 +58,11 @@ func RepoSizeRecalc(deps RepoSizeRecalcDeps) worker.Handler { |||
| 58 | return fmt.Errorf("load repo: %w", err) | 58 | return fmt.Errorf("load repo: %w", err) |
| 59 | } | 59 | } |
| 60 | 60 | ||
| 61 | - gitDir, err := deps.RepoFS.RepoPath(ownerRow.OwnerUsername, ownerRow.RepoName) | 61 | + ownerSlug, err := ownerSlugString(ownerRow.OwnerUsername) |
| 62 | + if err != nil { | ||
| 63 | + return worker.PoisonError(fmt.Errorf("repo owner slug: %w", err)) | ||
| 64 | + } | ||
| 65 | + gitDir, err := deps.RepoFS.RepoPath(ownerSlug, ownerRow.RepoName) | ||
| 62 | if err != nil { | 66 | if err != nil { |
| 63 | return worker.PoisonError(fmt.Errorf("repo path: %w", err)) | 67 | return worker.PoisonError(fmt.Errorf("repo path: %w", err)) |
| 64 | } | 68 | } |
internal/worker/sqlc/models.go modified @@ -1870,6 +1870,47 @@ type Org struct { |||
| 1870 | UpdatedAt pgtype.Timestamptz | 1870 | UpdatedAt pgtype.Timestamptz |
| 1871 | } | 1871 | } |
| 1872 | 1872 | ||
| 1873 | +type OrgGithubImport struct { | ||
| 1874 | + ID int64 | ||
| 1875 | + OrgID int64 | ||
| 1876 | + SourceHost string | ||
| 1877 | + SourceOrg string | ||
| 1878 | + RequestedByUserID pgtype.Int8 | ||
| 1879 | + Status string | ||
| 1880 | + IncludePrivate bool | ||
| 1881 | + TokenPresent bool | ||
| 1882 | + TokenCiphertext []byte | ||
| 1883 | + TokenNonce []byte | ||
| 1884 | + TotalCount int32 | ||
| 1885 | + LastError pgtype.Text | ||
| 1886 | + StartedAt pgtype.Timestamptz | ||
| 1887 | + CompletedAt pgtype.Timestamptz | ||
| 1888 | + CreatedAt pgtype.Timestamptz | ||
| 1889 | + UpdatedAt pgtype.Timestamptz | ||
| 1890 | +} | ||
| 1891 | + | ||
| 1892 | +type OrgGithubImportRepo struct { | ||
| 1893 | + ID int64 | ||
| 1894 | + ImportID int64 | ||
| 1895 | + GithubID pgtype.Int8 | ||
| 1896 | + SourceFullName string | ||
| 1897 | + SourceName string | ||
| 1898 | + TargetName string | ||
| 1899 | + CloneUrl string | ||
| 1900 | + Description string | ||
| 1901 | + DefaultBranch string | ||
| 1902 | + TargetVisibility RepoVisibility | ||
| 1903 | + IsPrivate bool | ||
| 1904 | + IsFork bool | ||
| 1905 | + Status string | ||
| 1906 | + RepoID pgtype.Int8 | ||
| 1907 | + LastError pgtype.Text | ||
| 1908 | + StartedAt pgtype.Timestamptz | ||
| 1909 | + CompletedAt pgtype.Timestamptz | ||
| 1910 | + CreatedAt pgtype.Timestamptz | ||
| 1911 | + UpdatedAt pgtype.Timestamptz | ||
| 1912 | +} | ||
| 1913 | + | ||
| 1873 | type OrgInvitation struct { | 1914 | type OrgInvitation struct { |
| 1874 | ID int64 | 1915 | ID int64 |
| 1875 | OrgID int64 | 1916 | OrgID int64 |
internal/worker/types.go modified @@ -79,6 +79,14 @@ const ( |||
| 79 | KindTrendingCompute Kind = "trending:compute" | 79 | KindTrendingCompute Kind = "trending:compute" |
| 80 | ) | 80 | ) |
| 81 | 81 | ||
| 82 | +// Organization import kinds. The discovery job lists GitHub repositories | ||
| 83 | +// and fans out one child job per repository so large organizations can | ||
| 84 | +// progress incrementally. | ||
| 85 | +const ( | ||
| 86 | + KindOrgGitHubImportDiscover Kind = "org:github_import_discover" | ||
| 87 | + KindOrgGitHubImportRepo Kind = "org:github_import_repo" | ||
| 88 | +) | ||
| 89 | + | ||
| 82 | // NotifyChannel is the Postgres LISTEN/NOTIFY channel the pool subscribes | 90 | // NotifyChannel is the Postgres LISTEN/NOTIFY channel the pool subscribes |
| 83 | // to so it wakes up immediately when a job is enqueued, instead of | 91 | // to so it wakes up immediately when a job is enqueued, instead of |
| 84 | // polling. Callers wrapping enqueue in a tx must NOTIFY inside the | 92 | // polling. Callers wrapping enqueue in a tx must NOTIFY inside the |