Lindenii Project Forge
# SPDX-License-Identifier: AGPL-3.0-only
# SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
# SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
root = true

[*]
end_of_line = lf
insert_final_newline = true
indent_style = tab
indent_size = 8
tab_size = 8

[*.py]
indent_style = space
indent_size = 4

[*.yaml]
indent_style = space
indent_size = 2
# SPDX-License-Identifier: AGPL-3.0-only
# SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
# SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
.PHONY: clean version.go man source.tar.gz

CFLAGS = -Wall -Wextra -Werror -pedantic -std=c99 -D_GNU_SOURCE

MAN_PAGES = forge.5 hookc.1

forge: source.tar.gz version.go hookc/*.c hookc/hookc man # TODO
	go build .

man: $(MAN_PAGES:%=man/%.html) $(MAN_PAGES:%=man/%.txt)

man/%.html: man/%
	mandoc -Thtml -O style=./mandoc.css $< > $@

man/%.txt: man/% utils/colb
	mandoc $< | ./utils/colb > $@

utils/colb: utils/colb.c

hookc/hookc:

version.go:
	printf 'package main\n\nconst VERSION = "%s"\n' `git describe --tags --always --dirty` > $@

clean:
	$(RM) forge version.go vendor source.tar.gz

source.tar.gz:
	rm -f source.tar.gz
	go mod vendor
	git ls-files -z | xargs -0 tar -czf source.tar.gz vendor
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "context" "github.com/jackc/pgx/v5/pgtype" ) // getRepoInfo returns the filesystem path and direct // access permission for a given repo and a provided ssh public key. func getRepoInfo(ctx context.Context, groupPath []string, repoName, sshPubkey string) (repoID int, fsPath string, access bool, contribReq, userType string, userID int, err error) { err = database.QueryRow(ctx, ` WITH RECURSIVE group_path_cte AS ( -- Start: match the first name in the path where parent_group IS NULL SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL -- Recurse: join next segment of the path SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT r.id, r.filesystem_path, CASE WHEN ugr.user_id IS NOT NULL THEN TRUE ELSE FALSE END AS has_role_in_group, r.contrib_requirements, COALESCE(u.type, ''), COALESCE(u.id, 0) FROM group_path_cte g JOIN repos r ON r.group_id = g.id LEFT JOIN ssh_public_keys s ON s.key_string = $3 LEFT JOIN users u ON u.id = s.user_id LEFT JOIN user_group_roles ugr ON ugr.group_id = g.id AND ugr.user_id = u.id WHERE g.depth = cardinality($1::text[]) AND r.name = $2 `, pgtype.FlatArray[string](groupPath), repoName, sshPubkey, ).Scan(&repoID, &fsPath, &access, &contribReq, &userType, &userID) return }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "html/template" "github.com/dgraph-io/ristretto/v2" "go.lindenii.runxiyu.org/lindenii-common/clog" ) var commitPathFileHTMLCache *ristretto.Cache[[]byte, template.HTML] func init() { var err error commitPathFileHTMLCache, err = ristretto.NewCache(&ristretto.Config[[]byte, template.HTML]{ NumCounters: 1e4, MaxCost: 1 << 60, BufferItems: 8192, }) if err != nil { clog.Fatal(1, "Error initializing commitPathFileHTMLCache: "+err.Error()) } }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "github.com/dgraph-io/ristretto/v2" "go.lindenii.runxiyu.org/lindenii-common/clog" ) var commitPathFileRawCache *ristretto.Cache[[]byte, string] func init() { var err error commitPathFileRawCache, err = ristretto.NewCache(&ristretto.Config[[]byte, string]{ NumCounters: 1e4, MaxCost: 1 << 60, BufferItems: 8192, }) if err != nil { clog.Fatal(1, "Error initializing commitPathFileRawCache: "+err.Error()) } }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "html/template" "github.com/dgraph-io/ristretto/v2" "go.lindenii.runxiyu.org/lindenii-common/clog" ) type treeReadmeCacheEntry struct { DisplayTree []displayTreeEntry ReadmeFilename string ReadmeRendered template.HTML } var treeReadmeCache *ristretto.Cache[[]byte, treeReadmeCacheEntry] func init() { var err error treeReadmeCache, err = ristretto.NewCache(&ristretto.Config[[]byte, treeReadmeCacheEntry]{ NumCounters: 1e4, MaxCost: 1 << 60, BufferItems: 8192, }) if err != nil { clog.Fatal(1, "Error initializing indexPageCache: "+err.Error()) } }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "github.com/dgraph-io/ristretto/v2" "go.lindenii.runxiyu.org/lindenii-common/clog" ) var indexCommitsDisplayCache *ristretto.Cache[[]byte, []commitDisplay] func init() { var err error indexCommitsDisplayCache, err = ristretto.NewCache(&ristretto.Config[[]byte, []commitDisplay]{ NumCounters: 1e4, MaxCost: 1 << 60, BufferItems: 8192, }) if err != nil { clog.Fatal(1, "Error initializing indexCommitsCache: "+err.Error()) } }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "bufio" "context" "errors" "os" "github.com/jackc/pgx/v5/pgxpool" "go.lindenii.runxiyu.org/lindenii-common/scfg" ) var database *pgxpool.Pool var config struct { HTTP struct { Net string `scfg:"net"` Addr string `scfg:"addr"` CookieExpiry int `scfg:"cookie_expiry"` Root string `scfg:"root"` ReadTimeout uint32 `scfg:"read_timeout"` WriteTimeout uint32 `scfg:"write_timeout"` IdleTimeout uint32 `scfg:"idle_timeout"` ReverseProxy bool `scfg:"reverse_proxy"` } `scfg:"http"` Hooks struct { Socket string `scfg:"socket"` Execs string `scfg:"execs"` } `scfg:"hooks"` Git struct { RepoDir string `scfg:"repo_dir"` } `scfg:"git"` SSH struct { Net string `scfg:"net"` Addr string `scfg:"addr"` Key string `scfg:"key"` Root string `scfg:"root"` } `scfg:"ssh"` IRC struct { Net string `scfg:"net"` Addr string `scfg:"addr"` TLS bool `scfg:"tls"` SendQ uint `scfg:"sendq"` Nick string `scfg:"nick"` User string `scfg:"user"` Gecos string `scfg:"gecos"` } `scfg:"irc"` General struct { Title string `scfg:"title"` } `scfg:"general"` DB struct { Type string `scfg:"type"` Conn string `scfg:"conn"` } `scfg:"db"` } func loadConfig(path string) (err error) { var configFile *os.File var decoder *scfg.Decoder if configFile, err = os.Open(path); err != nil { return err } defer configFile.Close() decoder = scfg.NewDecoder(bufio.NewReader(configFile)) if err = decoder.Decode(&config); err != nil { return err } if config.DB.Type != "postgres" { return errors.New("unsupported database type") } if database, err = pgxpool.New(context.Background(), config.DB.Conn); err != nil { return err } globalData["forge_title"] = config.General.Title return nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "context" "github.com/jackc/pgx/v5" ) // queryNameDesc is a helper function that executes a query and returns a // list of name_desc_t results. func queryNameDesc(ctx context.Context, query string, args ...any) (result []nameDesc, err error) { var rows pgx.Rows if rows, err = database.Query(ctx, query, args...); err != nil { return nil, err } defer rows.Close() for rows.Next() { var name, description string if err = rows.Scan(&name, &description); err != nil { return nil, err } result = append(result, nameDesc{name, description}) } return result, rows.Err() } // nameDesc holds a name and a description. type nameDesc struct { Name string Description string }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "bufio" "context" "errors" "io" "net/http" "net/url" "strings" "github.com/jackc/pgx/v5" ) func fedauth(ctx context.Context, userID int, service, remoteUsername, pubkey string) (bool, error) { var err error matched := false usernameEscaped := url.PathEscape(remoteUsername) var req *http.Request switch service { case "sr.ht": req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://meta.sr.ht/~"+usernameEscaped+".keys", nil) case "github": req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://github.com/"+usernameEscaped+".keys", nil) case "codeberg": req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://codeberg.org/"+usernameEscaped+".keys", nil) case "tangled": req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://tangled.sh/keys/"+usernameEscaped, nil) // TODO: Don't rely on one webview default: return false, errors.New("unknown federated service") } if err != nil { return false, err } resp, err := http.DefaultClient.Do(req) if err != nil { return false, err } defer func() { _ = resp.Body.Close() }() buf := bufio.NewReader(resp.Body) for { line, err := buf.ReadString('\n') if errors.Is(err, io.EOF) { break } else if err != nil { return false, err } lineSplit := strings.Split(line, " ") if len(lineSplit) < 2 { continue } line = strings.Join(lineSplit[:2], " ") if line == pubkey { matched = true break } } if !matched { return false, nil } var txn pgx.Tx if txn, err = database.Begin(ctx); err != nil { return false, err } defer func() { _ = txn.Rollback(ctx) }() if _, err = txn.Exec(ctx, `UPDATE users SET type = 'federated' WHERE id = $1 AND type = 'pubkey_only'`, userID); err != nil { return false, err } if _, err = txn.Exec(ctx, `INSERT INTO federated_identities (user_id, service, remote_username) VALUES ($1, $2, $3)`, userID, service, remoteUsername); err != nil { return false, err } if err = txn.Commit(ctx); err != nil { return false, err } return true, nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "bytes" "fmt" "strings" "time" "github.com/go-git/go-git/v5/plumbing/object" ) // get_patch_from_commit formats a commit object as if it was returned by // git-format-patch. func fmtCommitPatch(commit *object.Commit) (final string, err error) { var patch *object.Patch var buf bytes.Buffer var author object.Signature var date string var commitTitle, commitDetails string if _, patch, err = fmtCommitAsPatch(commit); err != nil { return "", err } author = commit.Author date = author.When.Format(time.RFC1123Z) commitTitle, commitDetails, _ = strings.Cut(commit.Message, "\n") // This date is hardcoded in Git. fmt.Fprintf(&buf, "From %s Mon Sep 17 00:00:00 2001\n", commit.Hash) fmt.Fprintf(&buf, "From: %s <%s>\n", author.Name, author.Email) fmt.Fprintf(&buf, "Date: %s\n", date) fmt.Fprintf(&buf, "Subject: [PATCH] %s\n\n", commitTitle) if commitDetails != "" { commitDetails1, commitDetails2, _ := strings.Cut(commitDetails, "\n") if strings.TrimSpace(commitDetails1) == "" { commitDetails = commitDetails2 } buf.WriteString(commitDetails) buf.WriteString("\n") } buf.WriteString("---\n") fmt.Fprint(&buf, patch.Stats().String()) fmt.Fprintln(&buf) buf.WriteString(patch.String()) fmt.Fprintf(&buf, "\n-- \n2.48.1\n") return buf.String(), nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "errors" "io" "io/fs" "os" "path/filepath" ) // deployHooks deploys the git hooks client to the filesystem. // The git hooks client is expected to be embedded in resources_fs and must be // pre-compiled during the build process; see the Makefile. func deployHooks() (err error) { err = func() (err error) { var srcFD fs.File var dstFD *os.File if srcFD, err = resourcesFS.Open("hookc/hookc"); err != nil { return err } defer srcFD.Close() if dstFD, err = os.OpenFile(filepath.Join(config.Hooks.Execs, "hookc"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755); err != nil { return err } defer dstFD.Close() if _, err = io.Copy(dstFD, srcFD); err != nil { return err } return nil }() if err != nil { return err } // Go's embed filesystems do not store permissions; but in any case, // they would need to be 0o755: if err = os.Chmod(filepath.Join(config.Hooks.Execs, "hookc"), 0o755); err != nil { return err } for _, hookName := range []string{ "pre-receive", } { if err = os.Symlink(filepath.Join(config.Hooks.Execs, "hookc"), filepath.Join(config.Hooks.Execs, hookName)); err != nil { if !errors.Is(err, fs.ErrExist) { return err } // TODO: Maybe check if it points to the right place? } } return nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
//
//go:build linux

package main

import (
	"bytes"
	"context"
	"encoding/binary"
	"errors"
	"fmt"
	"io"
	"net"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"syscall"

	"github.com/go-git/go-git/v5/plumbing"
	"github.com/go-git/go-git/v5/plumbing/object"
	"github.com/jackc/pgx/v5"
	"go.lindenii.runxiyu.org/lindenii-common/ansiec"
	"go.lindenii.runxiyu.org/lindenii-common/clog"
)

var (
	errGetFD    = errors.New("unable to get file descriptor")
	errGetUcred = errors.New("failed getsockopt")
)

// hooksHandler handles a connection from hookc via the
// unix socket.
func hooksHandler(conn net.Conn) {
	var ctx context.Context
	var cancel context.CancelFunc
	var ucred *syscall.Ucred
	var err error
	var cookie []byte
	var packPass packPass
	var sshStderr io.Writer
	var hookRet byte

	defer conn.Close()
	ctx, cancel = context.WithCancel(context.Background())
	defer cancel()

	// There aren't reasonable cases where someone would run this as
	// another user.
	if ucred, err = getUcred(conn); err != nil {
		if _, err = conn.Write([]byte{1}); err != nil {
			return
		}
		writeRedError(conn, "\nUnable to get peer credentials: %v", err)
		return
	}
	uint32uid := uint32(os.Getuid()) //#nosec G115
	if ucred.Uid != uint32uid {
		if _, err = conn.Write([]byte{1}); err != nil {
			return
		}
		writeRedError(conn, "\nUID mismatch")
		return
	}

	cookie = make([]byte, 64)
	if _, err = conn.Read(cookie); err != nil {
		if _, err = conn.Write([]byte{1}); err != nil {
			return
		}
		writeRedError(conn, "\nFailed to read cookie: %v", err)
		return
	}

	{
		var ok bool
		packPass, ok = packPasses.Load(bytesToString(cookie))
		if !ok {
			if _, err = conn.Write([]byte{1}); err != nil {
				return
			}
			writeRedError(conn, "\nInvalid handler cookie")
			return
		}
	}

	sshStderr = packPass.session.Stderr()

	_, _ = sshStderr.Write([]byte{'\n'})

	hookRet = func() byte {
		var argc64 uint64
		if err = binary.Read(conn, binary.NativeEndian, &argc64); err != nil {
			writeRedError(sshStderr, "Failed to read argc: %v", err)
			return 1
		}
		var args []string
		for range argc64 {
			var arg bytes.Buffer
			for {
				nextByte := make([]byte, 1)
				n, err := conn.Read(nextByte)
				if err != nil || n != 1 {
					writeRedError(sshStderr, "Failed to read arg: %v", err)
					return 1
				}
				if nextByte[0] == 0 {
					break
				}
				arg.WriteByte(nextByte[0])
			}
			args = append(args, arg.String())
		}

		gitEnv := make(map[string]string)
		for {
			var envLine bytes.Buffer
			for {
				nextByte := make([]byte, 1)
				n, err := conn.Read(nextByte)
				if err != nil || n != 1 {
					writeRedError(sshStderr, "Failed to read environment variable: %v", err)
					return 1
				}
				if nextByte[0] == 0 {
					break
				}
				envLine.WriteByte(nextByte[0])
			}
			if envLine.Len() == 0 {
				break
			}
			kv := envLine.String()
			parts := strings.SplitN(kv, "=", 2)
			if len(parts) < 2 {
				writeRedError(sshStderr, "Invalid environment variable line: %v", kv)
				return 1
			}
			gitEnv[parts[0]] = parts[1]
		}

		var stdin bytes.Buffer
		if _, err = io.Copy(&stdin, conn); err != nil {
			writeRedError(conn, "Failed to read to the stdin buffer: %v", err)
		}

		switch filepath.Base(args[0]) {
		case "pre-receive":
			if packPass.directAccess {
				return 0
			}
			allOK := true
			for {
				var line, oldOID, rest, newIOID, refName string
				var found bool
				var oldHash, newHash plumbing.Hash
				var oldCommit, newCommit *object.Commit
				var pushOptCount int

				pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
				if err != nil {
					writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
					return 1
				}

				// TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
				// Also it'd be nice to be able to combine users or whatever
				if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
					if pushOptCount == 0 {
						writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
						return 1
					}
					for pushOptIndex := range pushOptCount {
						pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", pushOptIndex)]
						if !ok {
							writeRedError(sshStderr, "Failed to get push option %d", pushOptIndex)
							return 1
						}
						if strings.HasPrefix(pushOpt, "fedid=") {
							fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
							service, username, found := strings.Cut(fedUserID, ":")
							if !found {
								writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
								return 1
							}

							ok, err := fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
							if err != nil {
								writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
								return 1
							}
							if !ok {
								writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
								return 1
							}

							break
						}
						if pushOptIndex == pushOptCount-1 {
							writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
							return 1
						}
					}
				}

				line, err = stdin.ReadString('\n')
				if errors.Is(err, io.EOF) {
					break
				} else if err != nil {
					writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
					return 1
				}
				line = line[:len(line)-1]

				oldOID, rest, found = strings.Cut(line, " ")
				if !found {
					writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
					return 1
				}

				newIOID, refName, found = strings.Cut(rest, " ")
				if !found {
					writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
					return 1
				}

				if strings.HasPrefix(refName, "refs/heads/contrib/") {
					if allZero(oldOID) { // New branch
						fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
						var newMRID int

						if packPass.userID != 0 {
							err = database.QueryRow(ctx,
								"INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
								packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
							).Scan(&newMRID)
						} else {
							err = database.QueryRow(ctx,
								"INSERT INTO merge_requests (repo_id, source_ref, status) VALUES ($1, $2, 'open') RETURNING id",
								packPass.repoID, strings.TrimPrefix(refName, "refs/heads/"),
							).Scan(&newMRID)
						}
						if err != nil {
							writeRedError(sshStderr, "Error creating merge request: %v", err)
							return 1
						}
						mergeRequestWebURL := fmt.Sprintf("%s/contrib/%d/", genHTTPRemoteURL(packPass.groupPath, packPass.repoName), newMRID)
						fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", mergeRequestWebURL+ansiec.Reset)

						select {
						case ircSendBuffered <- "PRIVMSG #chat :New merge request at " + mergeRequestWebURL:
						default:
							clog.Error("IRC SendQ exceeded")
						}
					} else { // Existing contrib branch
						var existingMRUser int
						var isAncestor bool

						err = database.QueryRow(ctx,
							"SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
							strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
						).Scan(&existingMRUser)
						if err != nil {
							if errors.Is(err, pgx.ErrNoRows) {
								writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
							} else {
								writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
							}
							return 1
						}
						if existingMRUser == 0 {
							allOK = false
							fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
							continue
						}

						if existingMRUser != packPass.userID {
							allOK = false
							fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to another user's MR)")
							continue
						}

						oldHash = plumbing.NewHash(oldOID)

						if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
							writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
							return 1
						}

						// Potential BUG: I'm not sure if new_commit is guaranteed to be
						// detectable as they haven't been merged into the main repo's
						// objects yet. But it seems to work, and I don't think there's
						// any reason for this to only work intermittently.
						newHash = plumbing.NewHash(newIOID)
						if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
							writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
							return 1
						}

						if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
							writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
							return 1
						}

						if !isAncestor {
							// TODO: Create MR snapshot ref instead
							allOK = false
							fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
							continue
						}

						fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
					}
				} else { // Non-contrib branch
					allOK = false
					fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
				}
			}

			fmt.Fprintln(sshStderr)
			if allOK {
				fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
				return 0
			}
			fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
			return 1
		default:
			fmt.Fprintln(sshStderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
			return 1
		}
	}()

	fmt.Fprintln(sshStderr)

	_, _ = conn.Write([]byte{hookRet})
}

func serveGitHooks(listener net.Listener) error {
	for {
		conn, err := listener.Accept()
		if err != nil {
			return err
		}
		go hooksHandler(conn)
	}
}

func getUcred(conn net.Conn) (ucred *syscall.Ucred, err error) {
	unixConn := conn.(*net.UnixConn)
	var unixConnFD *os.File

	if unixConnFD, err = unixConn.File(); err != nil {
		return nil, errGetFD
	}
	defer unixConnFD.Close()

	if ucred, err = syscall.GetsockoptUcred(int(unixConnFD.Fd()), syscall.SOL_SOCKET, syscall.SO_PEERCRED); err != nil {
		return nil, errGetUcred
	}
	return ucred, nil
}

func allZero(s string) bool {
	for _, r := range s {
		if r != '0' {
			return false
		}
	}
	return true
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
// //go:build !linux package main import ( "bytes" "context" "encoding/binary" "errors" "fmt" "io" "net" "path/filepath" "strconv" "strings" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/jackc/pgx/v5" "go.lindenii.runxiyu.org/lindenii-common/ansiec" "go.lindenii.runxiyu.org/lindenii-common/clog" ) var errGetFD = errors.New("unable to get file descriptor") // hooksHandler handles a connection from hookc via the // unix socket. func hooksHandler(conn net.Conn) { var ctx context.Context var cancel context.CancelFunc var err error var cookie []byte var packPass packPass var sshStderr io.Writer var hookRet byte defer conn.Close() ctx, cancel = context.WithCancel(context.Background()) defer cancel() // TODO: Validate that the connection is from the right user. cookie = make([]byte, 64) if _, err = conn.Read(cookie); err != nil { if _, err = conn.Write([]byte{1}); err != nil { return } writeRedError(conn, "\nFailed to read cookie: %v", err) return } { var ok bool packPass, ok = packPasses.Load(bytesToString(cookie)) if !ok { if _, err = conn.Write([]byte{1}); err != nil { return } writeRedError(conn, "\nInvalid handler cookie") return } } sshStderr = packPass.session.Stderr() _, _ = sshStderr.Write([]byte{'\n'}) hookRet = func() byte { var argc64 uint64 if err = binary.Read(conn, binary.NativeEndian, &argc64); err != nil { writeRedError(sshStderr, "Failed to read argc: %v", err) return 1 } var args []string for range argc64 { var arg bytes.Buffer for { nextByte := make([]byte, 1) n, err := conn.Read(nextByte) if err != nil || n != 1 { writeRedError(sshStderr, "Failed to read arg: %v", err) return 1 } if nextByte[0] == 0 { break } arg.WriteByte(nextByte[0]) } args = append(args, arg.String()) } gitEnv := make(map[string]string) for { var envLine bytes.Buffer for { nextByte := make([]byte, 1) n, err := conn.Read(nextByte) if err != nil || n != 1 { writeRedError(sshStderr, "Failed to read environment variable: %v", err) return 1 } if nextByte[0] == 0 { break } envLine.WriteByte(nextByte[0]) } if envLine.Len() == 0 { break } kv := envLine.String() parts := strings.SplitN(kv, "=", 2) if len(parts) < 2 { writeRedError(sshStderr, "Invalid environment variable line: %v", kv) return 1 } gitEnv[parts[0]] = parts[1] } var stdin bytes.Buffer if _, err = io.Copy(&stdin, conn); err != nil { writeRedError(conn, "Failed to read to the stdin buffer: %v", err) } switch filepath.Base(args[0]) { case "pre-receive": if packPass.directAccess { return 0 } allOK := true for { var line, oldOID, rest, newIOID, refName string var found bool var oldHash, newHash plumbing.Hash var oldCommit, newCommit *object.Commit var pushOptCount int pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"]) if err != nil { writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err) return 1 } // TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface? // Also it'd be nice to be able to combine users or whatever if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" { if pushOptCount == 0 { writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. 
For example, git push -o fedid=sr.ht:runxiyu") return 1 } for pushOptIndex := range pushOptCount { pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", pushOptIndex)] if !ok { writeRedError(sshStderr, "Failed to get push option %d", pushOptIndex) return 1 } if strings.HasPrefix(pushOpt, "fedid=") { fedUserID := strings.TrimPrefix(pushOpt, "fedid=") service, username, found := strings.Cut(fedUserID, ":") if !found { writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID) return 1 } ok, err := fedauth(ctx, packPass.userID, service, username, packPass.pubkey) if err != nil { writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err) return 1 } if !ok { writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID) return 1 } break } if pushOptIndex == pushOptCount-1 { writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu") return 1 } } } line, err = stdin.ReadString('\n') if errors.Is(err, io.EOF) { break } else if err != nil { writeRedError(sshStderr, "Failed to read pre-receive line: %v", err) return 1 } line = line[:len(line)-1] oldOID, rest, found = strings.Cut(line, " ") if !found { writeRedError(sshStderr, "Invalid pre-receive line: %v", line) return 1 } newIOID, refName, found = strings.Cut(rest, " ") if !found { writeRedError(sshStderr, "Invalid pre-receive line: %v", line) return 1 } if strings.HasPrefix(refName, "refs/heads/contrib/") { if allZero(oldOID) { // New branch fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName) var newMRID int if packPass.userID != 0 { err = database.QueryRow(ctx, "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id", packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"), ).Scan(&newMRID) } else { err = database.QueryRow(ctx, "INSERT INTO merge_requests (repo_id, source_ref, status) VALUES ($1, $2, 'open') RETURNING id", packPass.repoID, strings.TrimPrefix(refName, "refs/heads/"), ).Scan(&newMRID) } if err != nil { writeRedError(sshStderr, "Error creating merge request: %v", err) return 1 } mergeRequestWebURL := fmt.Sprintf("%s/contrib/%d/", genHTTPRemoteURL(packPass.groupPath, packPass.repoName), newMRID) fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", mergeRequestWebURL+ansiec.Reset) select { case ircSendBuffered <- "PRIVMSG #chat :New merge request at " + mergeRequestWebURL: default: clog.Error("IRC SendQ exceeded") } } else { // Existing contrib branch var existingMRUser int var isAncestor bool err = database.QueryRow(ctx, "SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2", strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID, ).Scan(&existingMRUser) if err != nil { if errors.Is(err, pgx.ErrNoRows) { writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err) } else { writeRedError(sshStderr, "Error querying for existing merge request: %v", err) } return 1 } if existingMRUser == 0 { allOK = false fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)") continue } if existingMRUser != packPass.userID { allOK = false fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs another user's MR)") continue } oldHash = 
plumbing.NewHash(oldOID) if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil { writeRedError(sshStderr, "Daemon failed to get old commit: %v", err) return 1 } // Potential BUG: I'm not sure if new_commit is guaranteed to be // detectable as they haven't been merged into the main repo's // objects yet. But it seems to work, and I don't think there's // any reason for this to only work intermitently. newHash = plumbing.NewHash(newIOID) if newCommit, err = packPass.repo.CommitObject(newHash); err != nil { writeRedError(sshStderr, "Daemon failed to get new commit: %v", err) return 1 } if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil { writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err) return 1 } if !isAncestor { // TODO: Create MR snapshot ref instead allOK = false fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)") continue } fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName) } } else { // Non-contrib branch allOK = false fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)") } } fmt.Fprintln(sshStderr) if allOK { fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)") return 0 } fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)") return 1 default: fmt.Fprintln(sshStderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset) return 1 } }() fmt.Fprintln(sshStderr) _, _ = conn.Write([]byte{hookRet}) } func serveGitHooks(listener net.Listener) error { for { conn, err := listener.Accept() if err != nil { return err } go hooksHandler(conn) } } func allZero(s string) bool { for _, r := range s { if r != '0' { return false } } return true }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "github.com/go-git/go-git/v5" gitConfig "github.com/go-git/go-git/v5/config" gitFmtConfig "github.com/go-git/go-git/v5/plumbing/format/config" ) // gitInit initializes a bare git repository with the // forge-deployed hooks directory as the hooksPath. func gitInit(repoPath string) (err error) { var repo *git.Repository var gitConf *gitConfig.Config if repo, err = git.PlainInit(repoPath, true); err != nil { return err } if gitConf, err = repo.Config(); err != nil { return err } gitConf.Raw.SetOption("core", gitFmtConfig.NoSubsection, "hooksPath", config.Hooks.Execs) gitConf.Raw.SetOption("receive", gitFmtConfig.NoSubsection, "advertisePushOptions", "true") if err = repo.SetConfig(gitConf); err != nil { return err } return nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "context" "errors" "io" "iter" "os" "strings" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/jackc/pgx/v5/pgtype" ) // openRepo opens a git repository by group and repo name. func openRepo(ctx context.Context, groupPath []string, repoName string) (repo *git.Repository, description string, repoID int, err error) { var fsPath string err = database.QueryRow(ctx, ` WITH RECURSIVE group_path_cte AS ( -- Start: match the first name in the path where parent_group IS NULL SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL -- Recurse: join next segment of the path SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT r.filesystem_path, COALESCE(r.description, ''), r.id FROM group_path_cte g JOIN repos r ON r.group_id = g.id WHERE g.depth = cardinality($1::text[]) AND r.name = $2 `, pgtype.FlatArray[string](groupPath), repoName).Scan(&fsPath, &description, &repoID) if err != nil { return } repo, err = git.PlainOpen(fsPath) return } // go-git's tree entries are not friendly for use in HTML templates. type displayTreeEntry struct { Name string Mode string Size int64 IsFile bool IsSubtree bool } func makeDisplayTree(tree *object.Tree) (displayTree []displayTreeEntry) { for _, entry := range tree.Entries { displayEntry := displayTreeEntry{} //exhaustruct:ignore var err error var osMode os.FileMode if osMode, err = entry.Mode.ToOSFileMode(); err != nil { displayEntry.Mode = "x---------" } else { displayEntry.Mode = osMode.String() } displayEntry.IsFile = entry.Mode.IsFile() if displayEntry.Size, err = tree.Size(entry.Name); err != nil { displayEntry.Size = 0 } displayEntry.Name = strings.TrimPrefix(entry.Name, "/") displayTree = append(displayTree, displayEntry) } return displayTree } func commitIterSeqErr(commitIter object.CommitIter) (iter.Seq[*object.Commit], *error) { var err error return func(yield func(*object.Commit) bool) { for { commit, err2 := commitIter.Next() if err2 != nil { if errors.Is(err2, io.EOF) { return } err = err2 return } if !yield(commit) { return } } }, &err } func iterSeqLimit[T any](s iter.Seq[T], n uint) iter.Seq[T] { return func(yield func(T) bool) { var iterations uint for v := range s { if iterations > n-1 { return } if !yield(v) { return } iterations++ } } } func getRecentCommits(repo *git.Repository, headHash plumbing.Hash, numCommits int) (recentCommits []*object.Commit, err error) { var commitIter object.CommitIter var thisCommit *object.Commit commitIter, err = repo.Log(&git.LogOptions{From: headHash}) //exhaustruct:ignore if err != nil { return nil, err } recentCommits = make([]*object.Commit, 0) defer commitIter.Close() if numCommits < 0 { for { thisCommit, err = commitIter.Next() if errors.Is(err, io.EOF) { return recentCommits, nil } else if err != nil { return nil, err } recentCommits = append(recentCommits, thisCommit) } } else { for range numCommits { thisCommit, err = commitIter.Next() if errors.Is(err, io.EOF) { return recentCommits, nil } else if err != nil { return nil, err } recentCommits = append(recentCommits, thisCommit) } } return recentCommits, err } func getRecentCommitsDisplay(repo *git.Repository, headHash plumbing.Hash, numCommits int) (recentCommits []commitDisplay, err error) { var 
commitIter object.CommitIter var thisCommit *object.Commit commitIter, err = repo.Log(&git.LogOptions{From: headHash}) //exhaustruct:ignore if err != nil { return nil, err } recentCommits = make([]commitDisplay, 0) defer commitIter.Close() if numCommits < 0 { for { thisCommit, err = commitIter.Next() if errors.Is(err, io.EOF) { return recentCommits, nil } else if err != nil { return nil, err } recentCommits = append(recentCommits, commitDisplay{ thisCommit.Hash, thisCommit.Author, thisCommit.Committer, thisCommit.Message, thisCommit.TreeHash, }) } } else { for range numCommits { thisCommit, err = commitIter.Next() if errors.Is(err, io.EOF) { return recentCommits, nil } else if err != nil { return nil, err } recentCommits = append(recentCommits, commitDisplay{ thisCommit.Hash, thisCommit.Author, thisCommit.Committer, thisCommit.Message, thisCommit.TreeHash, }) } } return recentCommits, err } type commitDisplay struct { Hash plumbing.Hash Author object.Signature Committer object.Signature Message string TreeHash plumbing.Hash } func fmtCommitAsPatch(commit *object.Commit) (parentCommitHash plumbing.Hash, patch *object.Patch, err error) { var parentCommit *object.Commit var commitTree *object.Tree parentCommit, err = commit.Parent(0) switch { case errors.Is(err, object.ErrParentNotFound): if commitTree, err = commit.Tree(); err != nil { return } if patch, err = nullTree.Patch(commitTree); err != nil { return } case err != nil: return default: parentCommitHash = parentCommit.Hash if patch, err = parentCommit.Patch(commit); err != nil { return } } return } var nullTree object.Tree
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" ) // getRefHash returns the hash of a reference given its // type and name as supplied in URL queries. func getRefHash(repo *git.Repository, refType, refName string) (refHash plumbing.Hash, err error) { var ref *plumbing.Reference switch refType { case "": if ref, err = repo.Head(); err != nil { return } refHash = ref.Hash() case "commit": refHash = plumbing.NewHash(refName) case "branch": if ref, err = repo.Reference(plumbing.NewBranchReferenceName(refName), true); err != nil { return } refHash = ref.Hash() case "tag": if ref, err = repo.Reference(plumbing.NewTagReferenceName(refName), true); err != nil { return } refHash = ref.Hash() default: panic("Invalid ref type " + refType) } return }
/*
 * SPDX-License-Identifier: AGPL-3.0-only
 * SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
 * SPDX-FileContributor: Test_User <hax@runxiyu.org>
 * SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
 * SPDX-FileCopyrightText: Copyright (c) 2025 Test_User <hax@runxiyu.org>
 */

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <sys/stat.h>
#include <string.h>
#include <fcntl.h>
#include <signal.h>

#ifdef __linux__
#include <linux/limits.h>
#include <sys/sendfile.h>
#define USE_SPLICE 1
#else
#define USE_SPLICE 0
#endif

int main(int argc, char *argv[])
{
	if (signal(SIGPIPE, SIG_IGN) == SIG_ERR) {
		perror("signal");
		return EXIT_FAILURE;
	}

	const char *socket_path = getenv("LINDENII_FORGE_HOOKS_SOCKET_PATH");
	if (socket_path == NULL) {
		dprintf(STDERR_FILENO, "environment variable LINDENII_FORGE_HOOKS_SOCKET_PATH undefined\n");
		return EXIT_FAILURE;
	}
	const char *cookie = getenv("LINDENII_FORGE_HOOKS_COOKIE");
	if (cookie == NULL) {
		dprintf(STDERR_FILENO, "environment variable LINDENII_FORGE_HOOKS_COOKIE undefined\n");
		return EXIT_FAILURE;
	}
	if (strlen(cookie) != 64) {
		dprintf(STDERR_FILENO, "environment variable LINDENII_FORGE_HOOKS_COOKIE is not 64 characters long\n");
		return EXIT_FAILURE;
	}

	/*
	 * All hooks in git (see builtin/receive-pack.c) use a pipe by setting
	 * .in = -1 on the child_process struct, which enables us to use
	 * splice(2) to move the data to the UNIX domain socket.
	 */

	struct stat stdin_stat;
	if (fstat(STDIN_FILENO, &stdin_stat) == -1) {
		perror("fstat on stdin");
		return EXIT_FAILURE;
	}
	if (!S_ISFIFO(stdin_stat.st_mode)) {
		dprintf(STDERR_FILENO, "stdin must be a pipe\n");
		return EXIT_FAILURE;
	}
#if USE_SPLICE
	int stdin_pipe_size = fcntl(STDIN_FILENO, F_GETPIPE_SZ);
	if (stdin_pipe_size == -1) {
		perror("fcntl on stdin");
		return EXIT_FAILURE;
	}
#else
	int stdin_pipe_size = 65536;
#endif
	if (stdin_pipe_size == -1) {
		perror("fcntl on stdin");
		return EXIT_FAILURE;
	}

	/*
	 * Same for stderr.
	 */
	struct stat stderr_stat;
	if (fstat(STDERR_FILENO, &stderr_stat) == -1) {
		perror("fstat on stderr");
		return EXIT_FAILURE;
	}
	if (!S_ISFIFO(stderr_stat.st_mode)) {
		dprintf(STDERR_FILENO, "stderr must be a pipe\n");
		return EXIT_FAILURE;
	}
#if USE_SPLICE
	int stderr_pipe_size = fcntl(STDERR_FILENO, F_GETPIPE_SZ);
	if (stderr_pipe_size == -1) {
		perror("fcntl on stderr");
		return EXIT_FAILURE;
	}
#else
	int stderr_pipe_size = 65536;
#endif
	if (stderr_pipe_size == -1) {
		perror("fcntl on stderr");
		return EXIT_FAILURE;
	}

	/* Connecting back to the main daemon */
	int sock;
	struct sockaddr_un addr;
	sock = socket(AF_UNIX, SOCK_STREAM, 0);
	if (sock == -1) {
		perror("internal socket creation");
		return EXIT_FAILURE;
	}
	memset(&addr, 0, sizeof(struct sockaddr_un));
	addr.sun_family = AF_UNIX;
	strncpy(addr.sun_path, socket_path, sizeof(addr.sun_path) - 1);
	if (connect(sock, (struct sockaddr *)&addr, sizeof(struct sockaddr_un)) == -1) {
		perror("internal socket connect");
		close(sock);
		return EXIT_FAILURE;
	}

	/*
	 * Send the 64-byte cookie back.
	 */
	ssize_t cookie_bytes_sent = send(sock, cookie, 64, 0);
	switch (cookie_bytes_sent) {
	case -1:
		perror("send cookie");
		close(sock);
		return EXIT_FAILURE;
	case 64:
		break;
	default:
		dprintf(STDERR_FILENO, "send returned unexpected value on internal socket\n");
		close(sock);
		return EXIT_FAILURE;
	}

	/*
	 * Report arguments.
	 */
	uint64_t argc64 = (uint64_t)argc;
	ssize_t bytes_sent = send(sock, &argc64, sizeof(argc64), 0);
	switch (bytes_sent) {
	case -1:
		perror("send argc");
		close(sock);
		return EXIT_FAILURE;
	case sizeof(argc64):
		break;
	default:
		dprintf(STDERR_FILENO, "send returned unexpected value on internal socket\n");
		close(sock);
		return EXIT_FAILURE;
	}
	for (int i = 0; i < argc; i++) {
		unsigned long len = strlen(argv[i]) + 1;
		bytes_sent = send(sock, argv[i], len, 0);
		if (bytes_sent == -1) {
			perror("send argv");
			close(sock);
			exit(EXIT_FAILURE);
		} else if ((unsigned long)bytes_sent == len) {
		} else {
			dprintf(STDERR_FILENO, "send returned unexpected value on internal socket\n");
			close(sock);
			exit(EXIT_FAILURE);
		}
	}

	/*
	 * Report GIT_* environment.
	 */
	extern char **environ;
	for (char **env = environ; *env != NULL; env++) {
		if (strncmp(*env, "GIT_", 4) == 0) {
			unsigned long len = strlen(*env) + 1;
			bytes_sent = send(sock, *env, len, 0);
			if (bytes_sent == -1) {
				perror("send env");
				close(sock);
				exit(EXIT_FAILURE);
			} else if ((unsigned long)bytes_sent == len) {
			} else {
				dprintf(STDERR_FILENO, "send returned unexpected value on internal socket\n");
				close(sock);
				exit(EXIT_FAILURE);
			}
		}
	}
	bytes_sent = send(sock, "", 1, 0);
	if (bytes_sent == -1) {
		perror("send env terminator");
		close(sock);
		exit(EXIT_FAILURE);
	} else if (bytes_sent == 1) {
	} else {
		dprintf(STDERR_FILENO, "send returned unexpected value on internal socket\n");
		close(sock);
		exit(EXIT_FAILURE);
	}

	/*
	 * Splice stdin to the daemon. For pre-receive it's just old/new/ref.
	 */
#if USE_SPLICE
	ssize_t stdin_bytes_spliced;
	while ((stdin_bytes_spliced = splice(STDIN_FILENO, NULL, sock, NULL, stdin_pipe_size, SPLICE_F_MORE)) > 0) {
	}
	if (stdin_bytes_spliced == -1) {
		perror("splice stdin to internal socket");
		close(sock);
		return EXIT_FAILURE;
	}
#else
	char buf[65536];
	ssize_t n;
	while ((n = read(STDIN_FILENO, buf, sizeof(buf))) > 0) {
		if (write(sock, buf, n) != n) {
			perror("write to internal socket");
			close(sock);
			return EXIT_FAILURE;
		}
	}
	if (n < 0) {
		perror("read from stdin");
		close(sock);
		return EXIT_FAILURE;
	}
#endif

	/*
	 * The sending part of the UNIX socket should be shut down, to let
	 * io.Copy on the Go side return.
	 */
	if (shutdown(sock, SHUT_WR) == -1) {
		perror("shutdown internal socket");
		close(sock);
		return EXIT_FAILURE;
	}

	/*
	 * The first byte of the response from the UNIX domain socket is the
	 * status code to return.
	 *
	 * FIXME: It doesn't make sense to require the return value to be
	 * sent before the log message. However, if we were to keep splicing,
	 * it's difficult to get the last byte before EOF. Perhaps we could
	 * hack together some sort of OOB message or ancillary data, or perhaps
	 * even use signals.
	 */
	char status_buf[1];
	ssize_t bytes_read = read(sock, status_buf, 1);
	switch (bytes_read) {
	case -1:
		perror("read status code from internal socket");
		close(sock);
		return EXIT_FAILURE;
	case 0:
		dprintf(STDERR_FILENO, "unexpected EOF on internal socket\n");
		close(sock);
		return EXIT_FAILURE;
	case 1:
		break;
	default:
		dprintf(STDERR_FILENO, "read returned unexpected value on internal socket\n");
		close(sock);
		return EXIT_FAILURE;
	}

	/*
	 * Now we can splice data from the UNIX domain socket to stderr.
	 * This data is directly passed to the user (with "remote: " prepended).
	 *
	 * We usually don't actually use this as the daemon could easily write
	 * to the SSH connection's stderr directly anyway.
	 */
#if USE_SPLICE
	ssize_t stderr_bytes_spliced;
	while ((stderr_bytes_spliced = splice(sock, NULL, STDERR_FILENO, NULL, stderr_pipe_size, SPLICE_F_MORE)) > 0) {
	}
	if (stderr_bytes_spliced == -1 && errno != ECONNRESET) {
		perror("splice internal socket to stderr");
		close(sock);
		return EXIT_FAILURE;
	}
#else
	while ((n = read(sock, buf, sizeof(buf))) > 0) {
		if (write(STDERR_FILENO, buf, n) != n) {
			perror("write to stderr");
			close(sock);
			return EXIT_FAILURE;
		}
	}
	if (n < 0 && errno != ECONNRESET) {
		perror("read from internal socket");
		close(sock);
		return EXIT_FAILURE;
	}
#endif

	close(sock);

	return *status_buf;
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" ) func getUserFromRequest(request *http.Request) (id int, username string, err error) { var sessionCookie *http.Cookie if sessionCookie, err = request.Cookie("session"); err != nil { return } err = database.QueryRow( request.Context(), "SELECT user_id, COALESCE(username, '') FROM users u JOIN sessions s ON u.id = s.user_id WHERE s.session_id = $1;", sessionCookie.Value, ).Scan(&id, &username) return }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" ) func errorPage404(w http.ResponseWriter, params map[string]any) { w.WriteHeader(http.StatusNotFound) _ = templates.ExecuteTemplate(w, "404", params) } func errorPage400(w http.ResponseWriter, params map[string]any, msg string) { w.WriteHeader(http.StatusBadRequest) params["complete_error_msg"] = msg _ = templates.ExecuteTemplate(w, "400", params) } func errorPage403(w http.ResponseWriter, params map[string]any, msg string) { w.WriteHeader(http.StatusForbidden) params["complete_error_msg"] = msg _ = templates.ExecuteTemplate(w, "403", params) } func errorPage451(w http.ResponseWriter, params map[string]any, msg string) { w.WriteHeader(http.StatusUnavailableForLegalReasons) params["complete_error_msg"] = msg _ = templates.ExecuteTemplate(w, "451", params) } func errorPage500(w http.ResponseWriter, params map[string]any, msg string) { w.WriteHeader(http.StatusInternalServerError) params["complete_error_msg"] = msg _ = templates.ExecuteTemplate(w, "500", params) } func errorPage501(w http.ResponseWriter, params map[string]any) { w.WriteHeader(http.StatusNotImplemented) _ = templates.ExecuteTemplate(w, "501", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main

// globalData is passed as "global" when rendering HTML templates.
var globalData = map[string]any{
	"server_public_key_string":      &serverPubkeyString,
	"server_public_key_fingerprint": &serverPubkeyFP,
	"forge_version":                 VERSION,
	// Some other ones are populated after config parsing
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "strings" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/storer" ) func httpHandleRepoBranches(writer http.ResponseWriter, _ *http.Request, params map[string]any) { var repo *git.Repository var repoName string var groupPath []string var err error var notes []string var branches []string var branchesIter storer.ReferenceIter repo, repoName, groupPath = params["repo"].(*git.Repository), params["repo_name"].(string), params["group_path"].([]string) if strings.Contains(repoName, "\n") || sliceContainsNewlines(groupPath) { notes = append(notes, "Path contains newlines; HTTP Git access impossible") } branchesIter, err = repo.Branches() if err == nil { _ = branchesIter.ForEach(func(branch *plumbing.Reference) error { branches = append(branches, branch.Name().Short()) return nil }) } params["branches"] = branches params["http_clone_url"] = genHTTPRemoteURL(groupPath, repoName) params["ssh_clone_url"] = genSSHRemoteURL(groupPath, repoName) params["notes"] = notes renderTemplate(writer, "repo_branches", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "runtime" ) func httpHandleGC(writer http.ResponseWriter, request *http.Request, _ map[string]any) { runtime.GC() http.Redirect(writer, request, "/", http.StatusSeeOther) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "errors" "net/http" "path/filepath" "strconv" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgtype" ) func httpHandleGroupIndex(writer http.ResponseWriter, request *http.Request, params map[string]any) { var groupPath []string var repos []nameDesc var subgroups []nameDesc var err error var groupID int var groupDesc string groupPath = params["group_path"].([]string) // The group itself err = database.QueryRow(request.Context(), ` WITH RECURSIVE group_path_cte AS ( SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT c.id, COALESCE(g.description, '') FROM group_path_cte c JOIN groups g ON g.id = c.id WHERE c.depth = cardinality($1::text[]) `, pgtype.FlatArray[string](groupPath), ).Scan(&groupID, &groupDesc) if errors.Is(err, pgx.ErrNoRows) { errorPage404(writer, params) return } else if err != nil { errorPage500(writer, params, "Error getting group: "+err.Error()) return } // ACL var count int err = database.QueryRow(request.Context(), ` SELECT COUNT(*) FROM user_group_roles WHERE user_id = $1 AND group_id = $2 `, params["user_id"].(int), groupID).Scan(&count) if err != nil { errorPage500(writer, params, "Error checking access: "+err.Error()) return } directAccess := (count > 0) if request.Method == http.MethodPost { if !directAccess { errorPage403(writer, params, "You do not have direct access to this group") return } repoName := request.FormValue("repo_name") repoDesc := request.FormValue("repo_desc") contribReq := request.FormValue("repo_contrib") if repoName == "" { errorPage400(writer, params, "Repo name is required") return } var newRepoID int err := database.QueryRow( request.Context(), `INSERT INTO repos (name, description, group_id, contrib_requirements) VALUES ($1, $2, $3, $4) RETURNING id`, repoName, repoDesc, groupID, contribReq, ).Scan(&newRepoID) if err != nil { errorPage500(writer, params, "Error creating repo: "+err.Error()) return } filePath := filepath.Join(config.Git.RepoDir, strconv.Itoa(newRepoID)+".git") _, err = database.Exec( request.Context(), `UPDATE repos SET filesystem_path = $1 WHERE id = $2`, filePath, newRepoID, ) if err != nil { errorPage500(writer, params, "Error updating repo path: "+err.Error()) return } if err = gitInit(filePath); err != nil { errorPage500(writer, params, "Error initializing repo: "+err.Error()) return } redirectUnconditionally(writer, request) return } // Repos var rows pgx.Rows rows, err = database.Query(request.Context(), ` SELECT name, COALESCE(description, '') FROM repos WHERE group_id = $1 `, groupID) if err != nil { errorPage500(writer, params, "Error getting repos: "+err.Error()) return } defer rows.Close() for rows.Next() { var name, description string if err = rows.Scan(&name, &description); err != nil { errorPage500(writer, params, "Error getting repos: "+err.Error()) return } repos = append(repos, nameDesc{name, description}) } if err = rows.Err(); err != nil { errorPage500(writer, params, "Error getting repos: "+err.Error()) return } // Subgroups rows, err = database.Query(request.Context(), ` SELECT name, COALESCE(description, '') FROM groups WHERE parent_group = $1 `, groupID) if err != nil { errorPage500(writer, params, "Error getting subgroups: "+err.Error()) return } defer 
rows.Close() for rows.Next() { var name, description string if err = rows.Scan(&name, &description); err != nil { errorPage500(writer, params, "Error getting subgroups: "+err.Error()) return } subgroups = append(subgroups, nameDesc{name, description}) } if err = rows.Err(); err != nil { errorPage500(writer, params, "Error getting subgroups: "+err.Error()) return } params["repos"] = repos params["subgroups"] = subgroups params["description"] = groupDesc params["direct_access"] = directAccess renderTemplate(writer, "group", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "runtime" "github.com/dustin/go-humanize" ) func httpHandleIndex(writer http.ResponseWriter, request *http.Request, params map[string]any) { var err error var groups []nameDesc groups, err = queryNameDesc(request.Context(), "SELECT name, COALESCE(description, '') FROM groups WHERE parent_group IS NULL") if err != nil { errorPage500(writer, params, "Error querying groups: "+err.Error()) return } params["groups"] = groups // Memory currently allocated memstats := runtime.MemStats{} //exhaustruct:ignore runtime.ReadMemStats(&memstats) params["mem"] = humanize.IBytes(memstats.Alloc) renderTemplate(writer, "index", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "crypto/rand" "encoding/base64" "errors" "fmt" "net/http" "time" "github.com/alexedwards/argon2id" "github.com/jackc/pgx/v5" ) func httpHandleLogin(writer http.ResponseWriter, request *http.Request, params map[string]any) { var username, password string var userID int var passwordHash string var err error var passwordMatches bool var cookieValue string var now time.Time var expiry time.Time var cookie http.Cookie if request.Method != http.MethodPost { renderTemplate(writer, "login", params) return } username = request.PostFormValue("username") password = request.PostFormValue("password") err = database.QueryRow(request.Context(), "SELECT id, COALESCE(password, '') FROM users WHERE username = $1", username, ).Scan(&userID, &passwordHash) if err != nil { if errors.Is(err, pgx.ErrNoRows) { params["login_error"] = "Unknown username" renderTemplate(writer, "login", params) return } errorPage500(writer, params, "Error querying user information: "+err.Error()) return } if passwordHash == "" { params["login_error"] = "User has no password" renderTemplate(writer, "login", params) return } if passwordMatches, err = argon2id.ComparePasswordAndHash(password, passwordHash); err != nil { errorPage500(writer, params, "Error comparing password and hash: "+err.Error()) return } if !passwordMatches { params["login_error"] = "Invalid password" renderTemplate(writer, "login", params) return } if cookieValue, err = randomUrlsafeStr(16); err != nil { errorPage500(writer, params, "Error getting random string: "+err.Error()) return } now = time.Now() expiry = now.Add(time.Duration(config.HTTP.CookieExpiry) * time.Second) cookie = http.Cookie{ Name: "session", Value: cookieValue, SameSite: http.SameSiteLaxMode, HttpOnly: true, Secure: false, // TODO Expires: expiry, Path: "/", } //exhaustruct:ignore http.SetCookie(writer, &cookie) _, err = database.Exec(request.Context(), "INSERT INTO sessions (user_id, session_id) VALUES ($1, $2)", userID, cookieValue) if err != nil { errorPage500(writer, params, "Error inserting session: "+err.Error()) return } http.Redirect(writer, request, "/", http.StatusSeeOther) } // randomUrlsafeStr generates a random string of the given entropic size // using the URL-safe base64 encoding. The actual size of the string returned // will be 4*sz. func randomUrlsafeStr(sz int) (string, error) { r := make([]byte, 3*sz) _, err := rand.Read(r) if err != nil { return "", fmt.Errorf("error generating random string: %w", err) } return base64.RawURLEncoding.EncodeToString(r), nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "fmt" "net/http" "strings" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/filemode" "github.com/go-git/go-git/v5/plumbing/format/diff" "github.com/go-git/go-git/v5/plumbing/object" "go.lindenii.runxiyu.org/lindenii-common/misc" ) // The file patch type from go-git isn't really usable in HTML templates // either. type usableFilePatch struct { From diff.File To diff.File Chunks []usableChunk } type usableChunk struct { Operation diff.Operation Content string } func httpHandleRepoCommit(writer http.ResponseWriter, request *http.Request, params map[string]any) { var repo *git.Repository var commitIDStrSpec, commitIDStrSpecNoSuffix string var commitID plumbing.Hash var parentCommitHash plumbing.Hash var commitObj *object.Commit var commitIDStr string var err error var patch *object.Patch repo, commitIDStrSpec = params["repo"].(*git.Repository), params["commit_id"].(string) commitIDStrSpecNoSuffix = strings.TrimSuffix(commitIDStrSpec, ".patch") commitID = plumbing.NewHash(commitIDStrSpecNoSuffix) if commitObj, err = repo.CommitObject(commitID); err != nil { errorPage500(writer, params, "Error getting commit object: "+err.Error()) return } if commitIDStrSpecNoSuffix != commitIDStrSpec { var patchStr string if patchStr, err = fmtCommitPatch(commitObj); err != nil { errorPage500(writer, params, "Error formatting patch: "+err.Error()) return } fmt.Fprintln(writer, patchStr) return } commitIDStr = commitObj.Hash.String() if commitIDStr != commitIDStrSpec { http.Redirect(writer, request, commitIDStr, http.StatusSeeOther) return } params["commit_object"] = commitObj params["commit_id"] = commitIDStr parentCommitHash, patch, err = fmtCommitAsPatch(commitObj) if err != nil { errorPage500(writer, params, "Error getting patch from commit: "+err.Error()) return } params["parent_commitHash"] = parentCommitHash.String() params["patch"] = patch params["file_patches"] = makeUsableFilePatches(patch) renderTemplate(writer, "repo_commit", params) } type fakeDiffFile struct { hash plumbing.Hash mode filemode.FileMode path string } func (f fakeDiffFile) Hash() plumbing.Hash { return f.hash } func (f fakeDiffFile) Mode() filemode.FileMode { return f.mode } func (f fakeDiffFile) Path() string { return f.path } var nullFakeDiffFile = fakeDiffFile{ hash: plumbing.NewHash("0000000000000000000000000000000000000000"), mode: misc.FirstOrPanic(filemode.New("100644")), path: "", } func makeUsableFilePatches(patch diff.Patch) (usableFilePatches []usableFilePatch) { // TODO: Remove unnecessary context // TODO: Prepend "+"/"-"/" " instead of solely distinguishing based on color for _, filePatch := range patch.FilePatches() { var fromFile, toFile diff.File var ufp usableFilePatch chunks := []usableChunk{} fromFile, toFile = filePatch.Files() if fromFile == nil { fromFile = nullFakeDiffFile } if toFile == nil { toFile = nullFakeDiffFile } for _, chunk := range filePatch.Chunks() { var content string content = chunk.Content() if len(content) > 0 && content[0] == '\n' { content = "\n" + content } // Horrible hack to work around browsers eating the newline that immediately follows <pre> chunks = append(chunks, usableChunk{ Operation: chunk.Type(), Content: content, }) } ufp = usableFilePatch{ Chunks: chunks, From: fromFile, To: toFile, } usableFilePatches = append(usableFilePatches, ufp) } return }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "github.com/jackc/pgx/v5" ) type idTitleStatus struct { ID int Title string Status string } func httpHandleRepoContribIndex(writer http.ResponseWriter, request *http.Request, params map[string]any) { var rows pgx.Rows var result []idTitleStatus var err error if rows, err = database.Query(request.Context(), "SELECT id, COALESCE(title, 'Untitled'), status FROM merge_requests WHERE repo_id = $1", params["repo_id"], ); err != nil { errorPage500(writer, params, "Error querying merge requests: "+err.Error()) return } defer rows.Close() for rows.Next() { var mrID int var mrTitle, mrStatus string if err = rows.Scan(&mrID, &mrTitle, &mrStatus); err != nil { errorPage500(writer, params, "Error scanning merge request: "+err.Error()) return } result = append(result, idTitleStatus{mrID, mrTitle, mrStatus}) } if err = rows.Err(); err != nil { errorPage500(writer, params, "Error ranging over merge requests: "+err.Error()) return } params["merge_requests"] = result renderTemplate(writer, "repo_contrib_index", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "strconv" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" ) func httpHandleRepoContribOne(writer http.ResponseWriter, request *http.Request, params map[string]any) { var mrIDStr string var mrIDInt int var err error var title, status, srcRefStr, dstBranchStr string var repo *git.Repository var srcRefHash plumbing.Hash var dstBranchHash plumbing.Hash var srcCommit, dstCommit, mergeBaseCommit *object.Commit var mergeBases []*object.Commit mrIDStr = params["mr_id"].(string) mrIDInt64, err := strconv.ParseInt(mrIDStr, 10, strconv.IntSize) if err != nil { errorPage400(writer, params, "Merge request ID not an integer") return } mrIDInt = int(mrIDInt64) if err = database.QueryRow(request.Context(), "SELECT COALESCE(title, ''), status, source_ref, COALESCE(destination_branch, '') FROM merge_requests WHERE id = $1", mrIDInt, ).Scan(&title, &status, &srcRefStr, &dstBranchStr); err != nil { errorPage500(writer, params, "Error querying merge request: "+err.Error()) return } repo = params["repo"].(*git.Repository) if srcRefHash, err = getRefHash(repo, "branch", srcRefStr); err != nil { errorPage500(writer, params, "Error getting source ref hash: "+err.Error()) return } if srcCommit, err = repo.CommitObject(srcRefHash); err != nil { errorPage500(writer, params, "Error getting source commit: "+err.Error()) return } params["source_commit"] = srcCommit if dstBranchStr == "" { dstBranchStr = "HEAD" dstBranchHash, err = getRefHash(repo, "", "") } else { dstBranchHash, err = getRefHash(repo, "branch", dstBranchStr) } if err != nil { errorPage500(writer, params, "Error getting destination branch hash: "+err.Error()) return } if dstCommit, err = repo.CommitObject(dstBranchHash); err != nil { errorPage500(writer, params, "Error getting destination commit: "+err.Error()) return } params["destination_commit"] = dstCommit if mergeBases, err = srcCommit.MergeBase(dstCommit); err != nil { errorPage500(writer, params, "Error getting merge base: "+err.Error()) return } if len(mergeBases) < 1 { errorPage500(writer, params, "No merge base found for this merge request; these two branches do not share any common history") // TODO return } mergeBaseCommit = mergeBases[0] params["merge_base"] = mergeBaseCommit patch, err := mergeBaseCommit.Patch(srcCommit) if err != nil { errorPage500(writer, params, "Error getting patch: "+err.Error()) return } params["file_patches"] = makeUsableFilePatches(patch) params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, srcRefStr, dstBranchStr renderTemplate(writer, "repo_contrib_one", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "strings" "time" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/storer" ) func httpHandleRepoIndex(writer http.ResponseWriter, _ *http.Request, params map[string]any) { var repo *git.Repository var repoName string var groupPath []string var refHash plumbing.Hash var refHashSlice []byte var err error var commitObj *object.Commit var tree *object.Tree var notes []string var branches []string var branchesIter storer.ReferenceIter var commits []commitDisplay repo, repoName, groupPath = params["repo"].(*git.Repository), params["repo_name"].(string), params["group_path"].([]string) if strings.Contains(repoName, "\n") || sliceContainsNewlines(groupPath) { notes = append(notes, "Path contains newlines; HTTP Git access impossible") } refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)) if err != nil { goto no_ref } refHashSlice = refHash[:] branchesIter, err = repo.Branches() if err == nil { _ = branchesIter.ForEach(func(branch *plumbing.Reference) error { branches = append(branches, branch.Name().Short()) return nil }) } params["branches"] = branches if value, found := indexCommitsDisplayCache.Get(refHashSlice); found { if value != nil { commits = value } else { goto readme } } else { start := time.Now() commits, err = getRecentCommitsDisplay(repo, refHash, 5) if err != nil { commits = nil } cost := time.Since(start).Nanoseconds() indexCommitsDisplayCache.Set(refHashSlice, commits, cost) if err != nil { goto readme } } params["commits"] = commits readme: if value, found := treeReadmeCache.Get(refHashSlice); found { params["files"] = value.DisplayTree params["readme_filename"] = value.ReadmeFilename params["readme"] = value.ReadmeRendered } else { start := time.Now() if commitObj, err = repo.CommitObject(refHash); err != nil { goto no_ref } if tree, err = commitObj.Tree(); err != nil { goto no_ref } displayTree := makeDisplayTree(tree) readmeFilename, readmeRendered := renderReadmeAtTree(tree) cost := time.Since(start).Nanoseconds() params["files"] = displayTree params["readme_filename"] = readmeFilename params["readme"] = readmeRendered entry := treeReadmeCacheEntry{ DisplayTree: displayTree, ReadmeFilename: readmeFilename, ReadmeRendered: readmeRendered, } treeReadmeCache.Set(refHashSlice, entry, cost) } no_ref: params["http_clone_url"] = genHTTPRemoteURL(groupPath, repoName) params["ssh_clone_url"] = genSSHRemoteURL(groupPath, repoName) params["notes"] = notes renderTemplate(writer, "repo_index", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "fmt" "io" "net/http" "os/exec" "github.com/jackc/pgx/v5/pgtype" ) func httpHandleRepoInfo(writer http.ResponseWriter, request *http.Request, params map[string]any) (err error) { groupPath := params["group_path"].([]string) repoName := params["repo_name"].(string) var repoPath string if err := database.QueryRow(request.Context(), ` WITH RECURSIVE group_path_cte AS ( -- Start: match the first name in the path where parent_group IS NULL SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL -- Recurse: join next segment of the path SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT r.filesystem_path FROM group_path_cte c JOIN repos r ON r.group_id = c.id WHERE c.depth = cardinality($1::text[]) AND r.name = $2 `, pgtype.FlatArray[string](groupPath), repoName, ).Scan(&repoPath); err != nil { return err } writer.Header().Set("Content-Type", "application/x-git-upload-pack-advertisement") writer.WriteHeader(http.StatusOK) cmd := exec.Command("git", "upload-pack", "--stateless-rpc", "--advertise-refs", repoPath) stdout, err := cmd.StdoutPipe() if err != nil { return err } defer func() { _ = stdout.Close() }() cmd.Stderr = cmd.Stdout if err = cmd.Start(); err != nil { return err } if err = packLine(writer, "# service=git-upload-pack\n"); err != nil { return err } if err = packFlush(writer); err != nil { return } if _, err = io.Copy(writer, stdout); err != nil { return err } if err = cmd.Wait(); err != nil { return err } return nil } // Taken from https://github.com/icyphox/legit, MIT license. func packLine(w io.Writer, s string) error { _, err := fmt.Fprintf(w, "%04x%s", len(s)+4, s) return err } // Taken from https://github.com/icyphox/legit, MIT license. func packFlush(w io.Writer) error { _, err := fmt.Fprint(w, "0000") return err }
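packLine and packFlush above implement Git's pkt-line framing for the smart-HTTP ref advertisement: each packet starts with a four-digit lowercase hex length that counts the four length bytes plus the payload, and "0000" is the flush packet that ends a section. A small standalone sketch of the same framing (illustrative only, not wired into the handler):

package main

import (
	"bytes"
	"fmt"
)

// The 26-byte service announcement becomes "001e# service=git-upload-pack\n",
// followed by the "0000" flush-pkt, which is exactly what httpHandleRepoInfo
// writes before streaming the git upload-pack output.
func main() {
	var buf bytes.Buffer
	payload := "# service=git-upload-pack\n" // 26 bytes; 26+4 = 0x1e
	fmt.Fprintf(&buf, "%04x%s", len(payload)+4, payload)
	fmt.Fprint(&buf, "0000")
	fmt.Printf("%q\n", buf.String())
}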
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" ) // TODO: I probably shouldn't include *all* commits here... func httpHandleRepoLog(writer http.ResponseWriter, _ *http.Request, params map[string]any) { var repo *git.Repository var refHash plumbing.Hash var err error repo = params["repo"].(*git.Repository) if refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil { errorPage500(writer, params, "Error getting ref hash: "+err.Error()) return } logOptions := git.LogOptions{From: refHash} //exhaustruct:ignore commitIter, err := repo.Log(&logOptions) if err != nil { errorPage500(writer, params, "Error getting recent commits: "+err.Error()) return } params["commits"], params["commits_err"] = commitIterSeqErr(commitIter) renderTemplate(writer, "repo_log", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "fmt" "net/http" "strings" "time" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" ) func httpHandleRepoRaw(writer http.ResponseWriter, request *http.Request, params map[string]any) { var rawPathSpec, pathSpec string var repo *git.Repository var refHash plumbing.Hash var refHashSlice []byte var commitObj *object.Commit var tree *object.Tree var err error rawPathSpec = params["rest"].(string) repo, pathSpec = params["repo"].(*git.Repository), strings.TrimSuffix(rawPathSpec, "/") params["path_spec"] = pathSpec if refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil { errorPage500(writer, params, "Error getting ref hash: "+err.Error()) return } refHashSlice = refHash[:] cacheHandle := append(refHashSlice, stringToBytes(pathSpec)...) //nolint:gocritic if value, found := treeReadmeCache.Get(cacheHandle); found { params["files"] = value.DisplayTree renderTemplate(writer, "repo_raw_dir", params) return } if value, found := commitPathFileRawCache.Get(cacheHandle); found { fmt.Fprint(writer, value) return } if commitObj, err = repo.CommitObject(refHash); err != nil { errorPage500(writer, params, "Error getting commit object: "+err.Error()) return } if tree, err = commitObj.Tree(); err != nil { errorPage500(writer, params, "Error getting file tree: "+err.Error()) return } start := time.Now() var target *object.Tree if pathSpec == "" { target = tree } else { if target, err = tree.Tree(pathSpec); err != nil { var file *object.File var fileContent string if file, err = tree.File(pathSpec); err != nil { errorPage500(writer, params, "Error retrieving path: "+err.Error()) return } if redirectNoDir(writer, request) { return } if fileContent, err = file.Contents(); err != nil { errorPage500(writer, params, "Error reading file: "+err.Error()) return } cost := time.Since(start).Nanoseconds() commitPathFileRawCache.Set(cacheHandle, fileContent, cost) fmt.Fprint(writer, fileContent) return } } if redirectDir(writer, request) { return } displayTree := makeDisplayTree(target) readmeFilename, readmeRendered := renderReadmeAtTree(target) cost := time.Since(start).Nanoseconds() params["files"] = displayTree params["readme_filename"] = readmeFilename params["readme"] = readmeRendered treeReadmeCache.Set(cacheHandle, treeReadmeCacheEntry{ DisplayTree: displayTree, ReadmeFilename: readmeFilename, ReadmeRendered: readmeRendered, }, cost) renderTemplate(writer, "repo_raw_dir", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "bytes" "html/template" "net/http" "path" "strings" "time" "github.com/alecthomas/chroma/v2" chromaHTML "github.com/alecthomas/chroma/v2/formatters/html" chromaLexers "github.com/alecthomas/chroma/v2/lexers" chromaStyles "github.com/alecthomas/chroma/v2/styles" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" ) func httpHandleRepoTree(writer http.ResponseWriter, request *http.Request, params map[string]any) { var rawPathSpec, pathSpec string var repo *git.Repository var refHash plumbing.Hash var refHashSlice []byte var commitObject *object.Commit var tree *object.Tree var err error rawPathSpec = params["rest"].(string) repo, pathSpec = params["repo"].(*git.Repository), strings.TrimSuffix(rawPathSpec, "/") params["path_spec"] = pathSpec if refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil { errorPage500(writer, params, "Error getting ref hash: "+err.Error()) return } refHashSlice = refHash[:] cacheHandle := append(refHashSlice, stringToBytes(pathSpec)...) //nolint:gocritic if value, found := treeReadmeCache.Get(cacheHandle); found { params["files"] = value.DisplayTree params["readme_filename"] = value.ReadmeFilename params["readme"] = value.ReadmeRendered renderTemplate(writer, "repo_tree_dir", params) return } if value, found := commitPathFileHTMLCache.Get(cacheHandle); found { params["file_contents"] = value renderTemplate(writer, "repo_tree_file", params) return } start := time.Now() var target *object.Tree if pathSpec == "" { if commitObject, err = repo.CommitObject(refHash); err != nil { errorPage500(writer, params, "Error getting commit object: "+err.Error()) return } if tree, err = commitObject.Tree(); err != nil { errorPage500(writer, params, "Error getting file tree: "+err.Error()) return } displayTree := makeDisplayTree(tree) readmeFilename, readmeRendered := renderReadmeAtTree(tree) cost := time.Since(start).Nanoseconds() params["files"] = displayTree params["readme_filename"] = readmeFilename params["readme"] = readmeRendered entry := treeReadmeCacheEntry{ DisplayTree: displayTree, ReadmeFilename: readmeFilename, ReadmeRendered: readmeRendered, } treeReadmeCache.Set(cacheHandle, entry, cost) renderTemplate(writer, "repo_tree_dir", params) return } if commitObject, err = repo.CommitObject(refHash); err != nil { errorPage500(writer, params, "Error getting commit object: "+err.Error()) return } if tree, err = commitObject.Tree(); err != nil { errorPage500(writer, params, "Error getting file tree: "+err.Error()) return } if target, err = tree.Tree(pathSpec); err != nil { var file *object.File var fileContent string var lexer chroma.Lexer var iterator chroma.Iterator var style *chroma.Style var formatter *chromaHTML.Formatter var formattedHTML template.HTML if file, err = tree.File(pathSpec); err != nil { errorPage500(writer, params, "Error retrieving path: "+err.Error()) return } if redirectNoDir(writer, request) { return } if fileContent, err = file.Contents(); err != nil { errorPage500(writer, params, "Error reading file: "+err.Error()) return } lexer = chromaLexers.Match(pathSpec) if lexer == nil { lexer = chromaLexers.Fallback } if iterator, err = lexer.Tokenise(nil, fileContent); err != nil { errorPage500(writer, params, "Error tokenizing code: "+err.Error()) return } var formattedHTMLStr bytes.Buffer style = chromaStyles.Get("autumn") formatter = chromaHTML.New(chromaHTML.WithClasses(true), chromaHTML.TabWidth(8)) if err = 
formatter.Format(&formattedHTMLStr, style, iterator); err != nil { errorPage500(writer, params, "Error formatting code: "+err.Error()) return } formattedHTML = template.HTML(formattedHTMLStr.Bytes()) //#nosec G203 cost := time.Since(start).Nanoseconds() commitPathFileHTMLCache.Set(cacheHandle, formattedHTML, cost) params["file_contents"] = formattedHTML renderTemplate(writer, "repo_tree_file", params) return } if len(rawPathSpec) != 0 && rawPathSpec[len(rawPathSpec)-1] != '/' { http.Redirect(writer, request, path.Base(pathSpec)+"/", http.StatusSeeOther) return } displayTree := makeDisplayTree(target) readmeFilename, readmeRendered := renderReadmeAtTree(target) cost := time.Since(start).Nanoseconds() entry := treeReadmeCacheEntry{ DisplayTree: displayTree, ReadmeFilename: readmeFilename, ReadmeRendered: readmeRendered, } treeReadmeCache.Set(cacheHandle, entry, cost) params["readme_filename"], params["readme"] = readmeFilename, readmeRendered params["files"] = displayTree renderTemplate(writer, "repo_tree_dir", params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "io" "net/http" "os" "os/exec" "github.com/jackc/pgx/v5/pgtype" ) func httpHandleUploadPack(writer http.ResponseWriter, request *http.Request, params map[string]any) (err error) { var groupPath []string var repoName string var repoPath string var stdout io.ReadCloser var stdin io.WriteCloser var cmd *exec.Cmd groupPath, repoName = params["group_path"].([]string), params["repo_name"].(string) if err := database.QueryRow(request.Context(), ` WITH RECURSIVE group_path_cte AS ( -- Start: match the first name in the path where parent_group IS NULL SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL -- Recurse: join next segment of the path SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT r.filesystem_path FROM group_path_cte c JOIN repos r ON r.group_id = c.id WHERE c.depth = cardinality($1::text[]) AND r.name = $2 `, pgtype.FlatArray[string](groupPath), repoName, ).Scan(&repoPath); err != nil { return err } writer.Header().Set("Content-Type", "application/x-git-upload-pack-result") writer.Header().Set("Connection", "Keep-Alive") writer.Header().Set("Transfer-Encoding", "chunked") writer.WriteHeader(http.StatusOK) cmd = exec.Command("git", "upload-pack", "--stateless-rpc", repoPath) cmd.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket) if stdout, err = cmd.StdoutPipe(); err != nil { return err } cmd.Stderr = cmd.Stdout defer func() { _ = stdout.Close() }() if stdin, err = cmd.StdinPipe(); err != nil { return err } defer func() { _ = stdin.Close() }() if err = cmd.Start(); err != nil { return err } if _, err = io.Copy(stdin, request.Body); err != nil { return err } if err = stdin.Close(); err != nil { return err } if _, err = io.Copy(writer, stdout); err != nil { return err } if err = cmd.Wait(); err != nil { return err } return nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" ) func httpHandleUsers(writer http.ResponseWriter, _ *http.Request, params map[string]any) { errorPage501(writer, params) }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "errors" "net/http" "strconv" "strings" "github.com/jackc/pgx/v5" "go.lindenii.runxiyu.org/lindenii-common/clog" ) type forgeHTTPRouter struct{} func (router *forgeHTTPRouter) ServeHTTP(writer http.ResponseWriter, request *http.Request) { var remoteAddr string if config.HTTP.ReverseProxy { remoteAddrs, ok := request.Header["X-Forwarded-For"] if ok && len(remoteAddrs) == 1 { remoteAddr = remoteAddrs[0] } else { remoteAddr = request.RemoteAddr } } else { remoteAddr = request.RemoteAddr } clog.Info("Incoming HTTP: " + remoteAddr + " " + request.Method + " " + request.RequestURI) var segments []string var err error var sepIndex int params := make(map[string]any) if segments, _, err = parseReqURI(request.RequestURI); err != nil { errorPage400(writer, params, "Error parsing request URI: "+err.Error()) return } dirMode := false if segments[len(segments)-1] == "" { dirMode = true segments = segments[:len(segments)-1] } params["url_segments"] = segments params["dir_mode"] = dirMode params["global"] = globalData var userID int // 0 for none userID, params["username"], err = getUserFromRequest(request) params["user_id"] = userID if err != nil && !errors.Is(err, http.ErrNoCookie) && !errors.Is(err, pgx.ErrNoRows) { errorPage500(writer, params, "Error getting user info from request: "+err.Error()) return } if userID == 0 { params["user_id_string"] = "" } else { params["user_id_string"] = strconv.Itoa(userID) } if len(segments) == 0 { httpHandleIndex(writer, request, params) return } if segments[0] == ":" { if len(segments) < 2 { errorPage404(writer, params) return } else if len(segments) == 2 && redirectDir(writer, request) { return } switch segments[1] { case "man": manHandler.ServeHTTP(writer, request) return case "static": staticHandler.ServeHTTP(writer, request) return case "source": sourceHandler.ServeHTTP(writer, request) return } } if segments[0] == ":" { switch segments[1] { case "login": httpHandleLogin(writer, request, params) return case "users": httpHandleUsers(writer, request, params) return case "gc": httpHandleGC(writer, request, params) return default: errorPage404(writer, params) return } } sepIndex = -1 for i, part := range segments { if part == ":" { sepIndex = i break } } params["separator_index"] = sepIndex var groupPath []string var moduleType string var moduleName string if sepIndex > 0 { groupPath = segments[:sepIndex] } else { groupPath = segments } params["group_path"] = groupPath switch { case sepIndex == -1: if redirectDir(writer, request) { return } httpHandleGroupIndex(writer, request, params) case len(segments) == sepIndex+1: errorPage404(writer, params) return case len(segments) == sepIndex+2: errorPage404(writer, params) return default: moduleType = segments[sepIndex+1] moduleName = segments[sepIndex+2] switch moduleType { case "repos": params["repo_name"] = moduleName if len(segments) > sepIndex+3 { switch segments[sepIndex+3] { case "info": if err = httpHandleRepoInfo(writer, request, params); err != nil { errorPage500(writer, params, err.Error()) } return case "git-upload-pack": if err = httpHandleUploadPack(writer, request, params); err != nil { errorPage500(writer, params, err.Error()) } return } } if params["ref_type"], params["ref_name"], err = getParamRefTypeName(request); err != nil { if errors.Is(err, errNoRefSpec) { params["ref_type"] = "" } else { errorPage500(writer, params, "Error querying ref type: "+err.Error()) return } } // TODO: subgroups if params["repo"], params["repo_description"], params["repo_id"], err = 
openRepo(request.Context(), groupPath, moduleName); err != nil { errorPage500(writer, params, "Error opening repo: "+err.Error()) return } if len(segments) == sepIndex+3 { if redirectDir(writer, request) { return } httpHandleRepoIndex(writer, request, params) return } repoFeature := segments[sepIndex+3] switch repoFeature { case "tree": if anyContain(segments[sepIndex+4:], "/") { errorPage400(writer, params, "Repo tree paths may not contain slashes in any segments") return } if dirMode { params["rest"] = strings.Join(segments[sepIndex+4:], "/") + "/" } else { params["rest"] = strings.Join(segments[sepIndex+4:], "/") } if len(segments) < sepIndex+5 && redirectDir(writer, request) { return } httpHandleRepoTree(writer, request, params) case "branches": if redirectDir(writer, request) { return } httpHandleRepoBranches(writer, request, params) return case "raw": if anyContain(segments[sepIndex+4:], "/") { errorPage400(writer, params, "Repo tree paths may not contain slashes in any segments") return } if dirMode { params["rest"] = strings.Join(segments[sepIndex+4:], "/") + "/" } else { params["rest"] = strings.Join(segments[sepIndex+4:], "/") } if len(segments) < sepIndex+5 && redirectDir(writer, request) { return } httpHandleRepoRaw(writer, request, params) case "log": if len(segments) > sepIndex+4 { errorPage400(writer, params, "Too many parameters") return } if redirectDir(writer, request) { return } httpHandleRepoLog(writer, request, params) case "commit": if len(segments) != sepIndex+5 { errorPage400(writer, params, "Incorrect number of parameters") return } if redirectNoDir(writer, request) { return } params["commit_id"] = segments[sepIndex+4] httpHandleRepoCommit(writer, request, params) case "contrib": if redirectDir(writer, request) { return } switch len(segments) { case sepIndex + 4: httpHandleRepoContribIndex(writer, request, params) case sepIndex + 5: params["mr_id"] = segments[sepIndex+4] httpHandleRepoContribOne(writer, request, params) default: errorPage400(writer, params, "Too many parameters") } default: errorPage404(writer, params) return } default: errorPage404(writer, params) return } } }
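ServeHTTP above treats the ":" path segment as the separator between a (possibly nested) group path and a module: the segments before ":" name the group hierarchy, segments[sepIndex+1] is the module type (currently "repos"), segments[sepIndex+2] is the module name, and anything after that selects a repo feature (info, git-upload-pack, tree, raw, log, commit, branches, contrib). A standalone sketch of that mapping on an illustrative URL; the group and repo names are hypothetical, borrowed from the seed SQL, and the real router additionally tracks a trailing slash as dirMode via parseReqURI:

package main

import (
	"fmt"
	"strings"
)

// Splits /lindenii/forge/:/repos/server/log into the group path
// ["lindenii" "forge"], module type "repos", module name "server",
// and the repo feature "log".
func main() {
	segments := strings.Split(strings.Trim("/lindenii/forge/:/repos/server/log", "/"), "/")
	sepIndex := -1
	for i, part := range segments {
		if part == ":" {
			sepIndex = i
			break
		}
	}
	fmt.Println(segments[:sepIndex])   // [lindenii forge]
	fmt.Println(segments[sepIndex+1])  // repos
	fmt.Println(segments[sepIndex+2])  // server
	fmt.Println(segments[sepIndex+3:]) // [log]
}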
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/http" "go.lindenii.runxiyu.org/lindenii-common/clog" ) // renderTemplate abstracts out the annoyances of reporting template rendering // errors. func renderTemplate(w http.ResponseWriter, templateName string, params map[string]any) { if err := templates.ExecuteTemplate(w, templateName, params); err != nil { http.Error(w, "error rendering template: "+err.Error(), http.StatusInternalServerError) clog.Error(err.Error()) } }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/url" "path" "strings" ) func firstLine(s string) string { before, _, _ := strings.Cut(s, "\n") return before } func baseName(s string) string { return path.Base(s) } func pathEscape(s string) string { return url.PathEscape(s) } func queryEscape(s string) string { return url.QueryEscape(s) } func dereference[T any](p *T) T { return *p } func dereferenceOrZero[T any](p *T) T { if p != nil { return *p } var z T return z } func minus(a, b int) int { return a - b }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "errors" "flag" "net" "net/http" "syscall" "time" "go.lindenii.runxiyu.org/lindenii-common/clog" ) func main() { configPath := flag.String( "config", "/etc/lindenii/forge.scfg", "path to configuration file", ) flag.Parse() if err := loadConfig(*configPath); err != nil { clog.Fatal(1, "Loading configuration: "+err.Error()) } if err := deployHooks(); err != nil { clog.Fatal(1, "Deploying hooks to filesystem: "+err.Error()) } if err := loadTemplates(); err != nil { clog.Fatal(1, "Loading templates: "+err.Error()) } // UNIX socket listener for hooks var hooksListener net.Listener var err error hooksListener, err = net.Listen("unix", config.Hooks.Socket) if errors.Is(err, syscall.EADDRINUSE) { clog.Warn("Removing existing socket " + config.Hooks.Socket) if err = syscall.Unlink(config.Hooks.Socket); err != nil { clog.Fatal(1, "Removing existing socket: "+err.Error()) } if hooksListener, err = net.Listen("unix", config.Hooks.Socket); err != nil { clog.Fatal(1, "Listening hooks: "+err.Error()) } } else if err != nil { clog.Fatal(1, "Listening hooks: "+err.Error()) } clog.Info("Listening hooks on unix " + config.Hooks.Socket) go func() { if err = serveGitHooks(hooksListener); err != nil { clog.Fatal(1, "Serving hooks: "+err.Error()) } }() // SSH listener sshListener, err := net.Listen(config.SSH.Net, config.SSH.Addr) if errors.Is(err, syscall.EADDRINUSE) && config.SSH.Net == "unix" { clog.Warn("Removing existing socket " + config.SSH.Addr) if err = syscall.Unlink(config.SSH.Addr); err != nil { clog.Fatal(1, "Removing existing socket: "+err.Error()) } if sshListener, err = net.Listen(config.SSH.Net, config.SSH.Addr); err != nil { clog.Fatal(1, "Listening SSH: "+err.Error()) } } else if err != nil { clog.Fatal(1, "Listening SSH: "+err.Error()) } clog.Info("Listening SSH on " + config.SSH.Net + " " + config.SSH.Addr) go func() { if err = serveSSH(sshListener); err != nil { clog.Fatal(1, "Serving SSH: "+err.Error()) } }() // HTTP listener httpListener, err := net.Listen(config.HTTP.Net, config.HTTP.Addr) if errors.Is(err, syscall.EADDRINUSE) && config.HTTP.Net == "unix" { clog.Warn("Removing existing socket " + config.HTTP.Addr) if err = syscall.Unlink(config.HTTP.Addr); err != nil { clog.Fatal(1, "Removing existing socket: "+err.Error()) } if httpListener, err = net.Listen(config.HTTP.Net, config.HTTP.Addr); err != nil { clog.Fatal(1, "Listening HTTP: "+err.Error()) } } else if err != nil { clog.Fatal(1, "Listening HTTP: "+err.Error()) } server := http.Server{ Handler: &forgeHTTPRouter{}, ReadTimeout: time.Duration(config.HTTP.ReadTimeout) * time.Second, WriteTimeout: time.Duration(config.HTTP.ReadTimeout) * time.Second, IdleTimeout: time.Duration(config.HTTP.ReadTimeout) * time.Second, } //exhaustruct:ignore clog.Info("Listening HTTP on " + config.HTTP.Net + " " + config.HTTP.Addr) go func() { if err = server.Serve(httpListener); err != nil && !errors.Is(err, http.ErrServerClosed) { clog.Fatal(1, "Serving HTTP: "+err.Error()) } }() // IRC bot go ircBotLoop() select {} }
.\" SPDX-License-Identifier: AGPL-3.0-only
.\" SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
.\" SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
.Dd March 30, 2025 .Dt FORGE 5 .Os Lindenii Forge .Sh NAME .Nm forge.scfg .Nd configuration file for Lindenii Forge .Sh DESCRIPTION .Nm describes the configuration for an .Xr forge 1 instance using the scfg format. .Pp Each directive consists of a name followed by zero or more parameters. Directives may also introduce blocks of subdirectives using braces. .Pp Comments begin with .Sq # and extend to the end of the line. .Sh DIRECTIVES .Bl -tag -width Ds .It Ic http Configures the ingress HTTP server. .Bl -tag -width Ds .It Ic net Network type to listen on (e.g., .Dq tcp , .Dq tcp4 , .Dq unix ) . .It Ic addr Address to listen on (e.g., .Dq :8080 or .Dq /var/run/lindenii/forge/http.sock ) . .It Ic cookie_expiry How long (in seconds) to keep session cookies. .It Ic root Canonical root URL of the web interface (e.g., .Dq https://forge.example.org ) . .It Ic read_timeout , write_timeout , idle_timeout Timeouts, in seconds, for the general HTTP server context. .It Ic reverse_proxy Boolean indicating whether to trust X-Forwarded-For headers. .El .It Ic ssh Configures the SSH server. .Bl -tag -width Ds .It Ic net Network type to listen on .Dq ( tcp is recommended). .It Ic addr Address to listen on (e.g., .Dq :22 ) . .It Ic key Path to the SSH host key (must be passwordless). .It Ic root Canonical SSH URL prefix (e.g., .Dq ssh://forge.example.org ) . .El .It Ic git Configures Git repository storage. .Bl -tag -width Ds .It Ic repo_dir Filesystem path under which new repositories are stored. .El .It Ic db Configures the database connection. .Bl -tag -width Ds .It Ic type Database type (currently must be .Dq postgres ) . .It Ic conn Connection string, e.g., .Dq postgresql:///lindenii-forge?host=/var/run/postgresql . .El .It Ic general Miscellaneous settings. .Bl -tag -width Ds .It Ic title A user-facing name for the instance. .El .It Ic hooks Configures Git hook communication with the forge daemon. .Bl -tag -width Ds .It Ic socket Path to a UNIX domain socket for receiving hook events. .It Ic execs Directory where Git hook executables are stored. .El .It Ic irc Optional configuration for IRC presence. .Bl -tag -width Ds .It Ic tls Boolean indicating whether to use TLS. .It Ic net , addr Network type and address (e.g., .Dq tcp , .Dq irc.example.org:6697 ) . .It Ic sendq Maximum send queue size. .It Ic nick , user , gecos Identity fields for the IRC connection. .El .El .Sh FILES .Bl -tag -width Ds .It Pa /etc/lindenii/forge.scfg Default path to the configuration file. .El .Sh SEE ALSO .Xr forge 1 , .Xr hookc 1 , .Lk https://git.sr.ht/~emersion/scfg scfg .Sh AUTHORS .An Runxi Yu Aq Mt https://runxiyu.org .An Test_User Aq Mt hax@runxiyu.org
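The directives above nest as scfg blocks. The following is a hypothetical sample forge.scfg; it is not shipped with the forge, and every value (addresses, paths, timeouts, the instance title, the IRC identity) is illustrative only:

http {
	net tcp
	addr :8080
	cookie_expiry 604800
	root https://forge.example.org
	read_timeout 120
	write_timeout 120
	idle_timeout 120
	reverse_proxy true
}
ssh {
	net tcp
	addr :22
	key /etc/lindenii/ssh_host_ed25519_key
	root ssh://forge.example.org
}
git {
	repo_dir /var/lib/lindenii/forge/repos
}
db {
	type postgres
	conn postgresql:///lindenii-forge?host=/var/run/postgresql
}
general {
	title "Example Forge"
}
hooks {
	socket /var/run/lindenii/forge/hooks.sock
	execs /usr/libexec/lindenii/forge/hooks
}
irc {
	tls true
	net tcp
	addr irc.example.org:6697
	sendq 6000
	nick forge-bot
	user forge-bot
	gecos "Lindenii Forge bot"
}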
.\" SPDX-License-Identifier: AGPL-3.0-only
.\" SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
.\" SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
.Dd March 30, 2025 .Dt HOOKC 1 .Os Lindenii Forge .Sh NAME .Nm hookc .Nd helper binary to delegate Git hook behavior to the forge daemon .Sh SYNOPSIS .Nm .Op Ar argument ... .Sh DESCRIPTION .Nm is a helper binary for Git server-side hooks that relays the hook's context to a persistent daemon via a UNIX domain socket and communicates back any relevant responses. .Pp It is intended to be invoked by .Xr git-receive-pack 1 for hooks such as .Pa pre-receive , .Pa update , and .Pa post-receive . .Sh ENVIRONMENT .Bl -tag -width Ds .It Ev LINDENII_FORGE_HOOKS_SOCKET_PATH Absolute path to the UNIX domain socket on which the daemon is listening. .It Ev LINDENII_FORGE_HOOKS_COOKIE 64-character authentication cookie used to validate the hook client to the daemon. .El .Sh OPERATION .Nm collects the following information and sends it to the daemon: .Bl -bullet .It All command-line arguments .It All .Ev GIT_* environment variables .It The raw hook .Pa stdin (e.g., old/new ref triplets for .Pa pre-receive ) .El .Pp After sending this data, it waits for a one-byte status code from the daemon, which becomes .Nm Ns 's own exit status. .Pp If the daemon sends any output afterward, it is forwarded to standard error and will appear as .Dq remote: output to the user. .Sh BUGS .Bl -bullet .It The status byte from the daemon currently must be sent before any stderr output. .It Currently assumes .Pa stdin and .Pa stderr are pipes, which is not guaranteed in future versions of Git. .El .Sh AUTHORS .An Runxi Yu Aq Mt https://runxiyu.org .An Test_User Aq Mt hax@runxiyu.org .Sh SEE ALSO .Xr git-receive-pack 1 , .Xr forge 1
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "bytes" "html" "html/template" "strings" "github.com/go-git/go-git/v5/plumbing/object" "github.com/microcosm-cc/bluemonday" "github.com/niklasfasching/go-org/org" "github.com/yuin/goldmark" "github.com/yuin/goldmark/extension" ) var markdownConverter = goldmark.New(goldmark.WithExtensions(extension.GFM)) func renderReadmeAtTree(tree *object.Tree) (readmeFilename string, readmeRenderedSafeHTML template.HTML) { var readmeRenderedUnsafe bytes.Buffer var readmeFile *object.File var readmeFileContents string var err error if readmeFile, err = tree.File("README"); err == nil { if readmeFileContents, err = readmeFile.Contents(); err != nil { return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error()) } return "README", template.HTML("<pre>" + html.EscapeString(readmeFileContents) + "</pre>") //#nosec G203 } if readmeFile, err = tree.File("README.md"); err == nil { if readmeFileContents, err = readmeFile.Contents(); err != nil { return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error()) } if err = markdownConverter.Convert(stringToBytes(readmeFileContents), &readmeRenderedUnsafe); err != nil { return "Error fetching README", escapeHTML("Unable to render README: " + err.Error()) } return "README.md", template.HTML(bluemonday.UGCPolicy().SanitizeBytes(readmeRenderedUnsafe.Bytes())) //#nosec G203 } if readmeFile, err = tree.File("README.org"); err == nil { if readmeFileContents, err = readmeFile.Contents(); err != nil { return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error()) } orgHTML, err := org.New().Parse(strings.NewReader(readmeFileContents), readmeFilename).Write(org.NewHTMLWriter()) if err != nil { return "Error fetching README", escapeHTML("Unable to render README: " + err.Error()) } return "README.org", template.HTML(bluemonday.UGCPolicy().Sanitize(orgHTML)) //#nosec G203 } return "", "" } func escapeHTML(s string) template.HTML { return template.HTML(html.EscapeString(s)) //#nosec G203 }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "net/url" "strings" ) // We don't use path.Join because it collapses multiple slashes into one. func genSSHRemoteURL(groupPath []string, repoName string) string { return strings.TrimSuffix(config.SSH.Root, "/") + "/" + segmentsToURL(groupPath) + "/:/repos/" + url.PathEscape(repoName) } func genHTTPRemoteURL(groupPath []string, repoName string) string { return strings.TrimSuffix(config.HTTP.Root, "/") + "/" + segmentsToURL(groupPath) + "/:/repos/" + url.PathEscape(repoName) }
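With the hypothetical root URLs from the sample configuration earlier and the group/repo names from the seed SQL, these helpers yield clone URLs such as https://forge.example.org/lindenii/forge/:/repos/server and ssh://forge.example.org/lindenii/forge/:/repos/server. A standalone sketch of the HTTP variant, assuming segmentsToURL (not shown here) path-escapes each group segment and joins them with slashes:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// Illustrative reimplementation of genHTTPRemoteURL for one hypothetical input.
func main() {
	root := "https://forge.example.org"
	groupPath := []string{"lindenii", "forge"}
	repoName := "server"
	escaped := make([]string, 0, len(groupPath))
	for _, s := range groupPath {
		escaped = append(escaped, url.PathEscape(s))
	}
	fmt.Println(strings.TrimSuffix(root, "/") + "/" + strings.Join(escaped, "/") + "/:/repos/" + url.PathEscape(repoName))
	// https://forge.example.org/lindenii/forge/:/repos/server
}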
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "embed" "html/template" "io/fs" "net/http" "github.com/tdewolff/minify/v2" "github.com/tdewolff/minify/v2/html" ) //go:embed LICENSE source.tar.gz var sourceFS embed.FS var sourceHandler = http.StripPrefix( "/:/source/", http.FileServer(http.FS(sourceFS)), ) //go:embed templates/* static/* hookc/hookc man/*.html man/*.txt man/*.css var resourcesFS embed.FS var templates *template.Template func loadTemplates() (err error) { minifier := minify.New() minifierOptions := html.Minifier{ TemplateDelims: [2]string{"{{", "}}"}, KeepDefaultAttrVals: true, } //exhaustruct:ignore minifier.Add("text/html", &minifierOptions) templates = template.New("templates").Funcs(template.FuncMap{ "first_line": firstLine, "base_name": baseName, "path_escape": pathEscape, "query_escape": queryEscape, "dereference_error": dereferenceOrZero[error], "minus": minus, }) err = fs.WalkDir(resourcesFS, "templates", func(path string, d fs.DirEntry, err error) error { if err != nil { return err } if !d.IsDir() { content, err := fs.ReadFile(resourcesFS, path) if err != nil { return err } minified, err := minifier.Bytes("text/html", content) if err != nil { return err } _, err = templates.Parse(bytesToString(minified)) if err != nil { return err } } return nil }) return err } var ( staticHandler http.Handler manHandler http.Handler ) func init() { staticFS, err := fs.Sub(resourcesFS, "static") if err != nil { panic(err) } staticHandler = http.StripPrefix("/:/static/", http.FileServer(http.FS(staticFS))) manFS, err := fs.Sub(resourcesFS, "man") if err != nil { panic(err) } manHandler = http.StripPrefix("/:/man/", http.FileServer(http.FS(manFS))) }
#!/bin/sh # SPDX-License-Identifier: AGPL-3.0-only
# SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
# SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
set -eux go get -t -u go mod tidy
-- SPDX-License-Identifier: AGPL-3.0-only
-- SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
-- SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
DROP SCHEMA public CASCADE; CREATE SCHEMA public;
-- SPDX-License-Identifier: AGPL-3.0-only
-- SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
-- SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
CREATE TABLE groups ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL, parent_group INTEGER REFERENCES groups(id) ON DELETE CASCADE, description TEXT, UNIQUE NULLS NOT DISTINCT (parent_group, name) ); CREATE TABLE repos ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE RESTRICT, -- I mean, should be CASCADE but deleting Git repos on disk also needs to be considered contrib_requirements TEXT NOT NULL CHECK (contrib_requirements IN ('closed', 'registered_user', 'federated', 'ssh_pubkey', 'public')), name TEXT NOT NULL, UNIQUE(group_id, name), description TEXT, filesystem_path TEXT ); CREATE TABLE ticket_trackers ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE RESTRICT, name TEXT NOT NULL, UNIQUE(group_id, name), description TEXT ); CREATE TABLE tickets ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, tracker_id INTEGER NOT NULL REFERENCES ticket_trackers(id) ON DELETE CASCADE, title TEXT NOT NULL, description TEXT ); CREATE TABLE mailing_lists ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE RESTRICT, name TEXT NOT NULL, UNIQUE(group_id, name), description TEXT ); CREATE TABLE mailing_list_emails ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, list_id INTEGER NOT NULL REFERENCES mailing_lists(id) ON DELETE CASCADE, title TEXT NOT NULL, sender TEXT NOT NULL, date TIMESTAMP NOT NULL, content BYTEA NOT NULL ); CREATE TABLE users ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, username TEXT UNIQUE, type TEXT NOT NULL CHECK (type IN ('pubkey_only', 'federated', 'registered')), password TEXT ); CREATE TABLE ssh_public_keys ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, key_string TEXT NOT NULL, user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, CONSTRAINT unique_key_string EXCLUDE USING HASH (key_string WITH =) ); CREATE TABLE sessions ( user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, session_id TEXT PRIMARY KEY NOT NULL, UNIQUE(user_id, session_id) ); -- TODO: CREATE TABLE merge_requests ( id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, title TEXT, repo_id INTEGER NOT NULL REFERENCES repos(id) ON DELETE CASCADE, creator INTEGER REFERENCES users(id) ON DELETE SET NULL, source_ref TEXT NOT NULL, destination_branch TEXT, status TEXT NOT NULL CHECK (status IN ('open', 'merged', 'closed')), UNIQUE (repo_id, source_ref, destination_branch), UNIQUE (repo_id, id) ); CREATE TABLE user_group_roles ( group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE CASCADE, user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, PRIMARY KEY(user_id, group_id) ); CREATE TABLE federated_identities ( user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, service TEXT NOT NULL, remote_username TEXT NOT NULL, PRIMARY KEY(user_id, service) );
-- SPDX-License-Identifier: AGPL-3.0-only
-- SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
-- SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
WITH parent_group AS ( INSERT INTO groups (name, description) VALUES ('lindenii', 'The Lindenii Project') RETURNING id ), child_group AS ( INSERT INTO groups (name, description, parent_group) SELECT 'forge', 'Lindenii Forge', id FROM parent_group RETURNING id ), create_repos AS ( INSERT INTO repos (name, group_id, contrib_requirements, filesystem_path) SELECT 'server', id, 'public', '/home/runxiyu/Lindenii/forge/server/.git' FROM child_group ), new_user AS ( INSERT INTO users (username, type, password) VALUES ('test', 'registered', '$argon2id$v=19$m=4096,t=3,p=1$YWFhYWFhYWFhYWFh$i40k7TPFHqXRH4eQOAYGH3LvzwQ38jqqlfap9Rtiy3c') RETURNING id ), new_ssh AS ( INSERT INTO ssh_public_keys (key_string, user_id) SELECT 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAuavKDhEM1L6CufIecy2P712gp151CqZuwSYahTWvmq', id FROM new_user RETURNING user_id ) INSERT INTO user_group_roles (group_id, user_id) SELECT child_group.id, new_ssh.user_id FROM child_group, new_ssh; SELECT * FROM groups; SELECT * FROM repos; SELECT * FROM users; SELECT * FROM ssh_public_keys; SELECT * FROM user_group_roles;
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "errors" "fmt" "os" "os/exec" gliderSSH "github.com/gliderlabs/ssh" "github.com/go-git/go-git/v5" "go.lindenii.runxiyu.org/lindenii-common/cmap" ) type packPass struct { session gliderSSH.Session repo *git.Repository pubkey string directAccess bool repoPath string userID int userType string repoID int groupPath []string repoName string contribReq string } var packPasses = cmap.Map[string, packPass]{} // sshHandleRecvPack handles attempts to push to repos. func sshHandleRecvPack(session gliderSSH.Session, pubkey, repoIdentifier string) (err error) { groupPath, repoName, repoID, repoPath, directAccess, contribReq, userType, userID, err := getRepoInfo2(session.Context(), repoIdentifier, pubkey) if err != nil { return err } repo, err := git.PlainOpen(repoPath) if err != nil { return err } repoConf, err := repo.Config() if err != nil { return err } repoConfCore := repoConf.Raw.Section("core") if repoConfCore == nil { return errors.New("repository has no core section in config") } hooksPath := repoConfCore.OptionAll("hooksPath") if len(hooksPath) != 1 || hooksPath[0] != config.Hooks.Execs { return errors.New("repository has hooksPath set to an unexpected value") } if !directAccess { switch contribReq { case "closed": if !directAccess { return errors.New("you need direct access to push to this repo") } case "registered_user": if userType != "registered" { return errors.New("you need to be a registered user to push to this repo") } case "ssh_pubkey": fallthrough case "federated": if pubkey == "" { return errors.New("you need to have an SSH public key to push to this repo") } if userType == "" { userID, err = addUserSSH(session.Context(), pubkey) if err != nil { return err } fmt.Fprintln(session.Stderr(), "you are now registered as user ID", userID) userType = "pubkey_only" } case "public": default: panic("unknown contrib_requirements value " + contribReq) } } cookie, err := randomUrlsafeStr(16) if err != nil { fmt.Fprintln(session.Stderr(), "Error while generating cookie:", err) } packPasses.Store(cookie, packPass{ session: session, pubkey: pubkey, directAccess: directAccess, repoPath: repoPath, userID: userID, repoID: repoID, groupPath: groupPath, repoName: repoName, repo: repo, contribReq: contribReq, userType: userType, }) defer packPasses.Delete(cookie) // The Delete won't execute until proc.Wait returns unless something // horribly wrong such as a panic occurs. proc := exec.CommandContext(session.Context(), "git-receive-pack", repoPath) proc.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket, "LINDENII_FORGE_HOOKS_COOKIE="+cookie, ) proc.Stdin = session proc.Stdout = session proc.Stderr = session.Stderr() if err = proc.Start(); err != nil { fmt.Fprintln(session.Stderr(), "Error while starting process:", err) return err } err = proc.Wait() if err != nil { fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err) } return err }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "fmt" "os" "os/exec" glider_ssh "github.com/gliderlabs/ssh" ) // sshHandleUploadPack handles clones/fetches. It just uses git-upload-pack // and has no ACL checks. func sshHandleUploadPack(session glider_ssh.Session, pubkey, repoIdentifier string) (err error) { var repoPath string if _, _, _, repoPath, _, _, _, _, err = getRepoInfo2(session.Context(), repoIdentifier, pubkey); err != nil { return err } proc := exec.CommandContext(session.Context(), "git-upload-pack", repoPath) proc.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket) proc.Stdin = session proc.Stdout = session proc.Stderr = session.Stderr() if err = proc.Start(); err != nil { fmt.Fprintln(session.Stderr(), "Error while starting process:", err) return err } err = proc.Wait() if err != nil { fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err) } return err }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "fmt" "net" "os" "strings" gliderSSH "github.com/gliderlabs/ssh" "go.lindenii.runxiyu.org/lindenii-common/ansiec" "go.lindenii.runxiyu.org/lindenii-common/clog" goSSH "golang.org/x/crypto/ssh" ) var ( serverPubkeyString string serverPubkeyFP string serverPubkey goSSH.PublicKey ) func serveSSH(listener net.Listener) error { var hostKeyBytes []byte var hostKey goSSH.Signer var err error var server *gliderSSH.Server if hostKeyBytes, err = os.ReadFile(config.SSH.Key); err != nil { return err } if hostKey, err = goSSH.ParsePrivateKey(hostKeyBytes); err != nil { return err } serverPubkey = hostKey.PublicKey() serverPubkeyString = bytesToString(goSSH.MarshalAuthorizedKey(serverPubkey)) serverPubkeyFP = goSSH.FingerprintSHA256(serverPubkey) server = &gliderSSH.Server{ Handler: func(session gliderSSH.Session) { clientPubkey := session.PublicKey() var clientPubkeyStr string if clientPubkey != nil { clientPubkeyStr = strings.TrimSuffix(bytesToString(goSSH.MarshalAuthorizedKey(clientPubkey)), "\n") } clog.Info("Incoming SSH: " + session.RemoteAddr().String() + " " + clientPubkeyStr + " " + session.RawCommand()) fmt.Fprintln(session.Stderr(), ansiec.Blue+"Lindenii Forge "+VERSION+", source at "+strings.TrimSuffix(config.HTTP.Root, "/")+"/:/source/"+ansiec.Reset+"\r") cmd := session.Command() if len(cmd) < 2 { fmt.Fprintln(session.Stderr(), "Insufficient arguments\r") return } switch cmd[0] { case "git-upload-pack": if len(cmd) > 2 { fmt.Fprintln(session.Stderr(), "Too many arguments\r") return } err = sshHandleUploadPack(session, clientPubkeyStr, cmd[1]) case "git-receive-pack": if len(cmd) > 2 { fmt.Fprintln(session.Stderr(), "Too many arguments\r") return } err = sshHandleRecvPack(session, clientPubkeyStr, cmd[1]) default: fmt.Fprintln(session.Stderr(), "Unsupported command: "+cmd[0]+"\r") return } if err != nil { fmt.Fprintln(session.Stderr(), err.Error()) return } }, PublicKeyHandler: func(_ gliderSSH.Context, _ gliderSSH.PublicKey) bool { return true }, KeyboardInteractiveHandler: func(_ gliderSSH.Context, _ goSSH.KeyboardInteractiveChallenge) bool { return true }, // It is intentional that we do not check any credentials and accept all connections. // This allows all users to connect and clone repositories. However, the public key // is passed to handlers, so e.g. the push handler could check the key and reject the // push if it needs to. } //exhaustruct:ignore server.AddHostKey(hostKey) if err = server.Serve(listener); err != nil { clog.Fatal(1, "Serving SSH: "+err.Error()) } return nil }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "context" "errors" "fmt" "io" "net/url" "strings" "go.lindenii.runxiyu.org/lindenii-common/ansiec" ) var errIllegalSSHRepoPath = errors.New("illegal SSH repo path") func getRepoInfo2(ctx context.Context, sshPath, sshPubkey string) (groupPath []string, repoName string, repoID int, repoPath string, directAccess bool, contribReq, userType string, userID int, err error) { var segments []string var sepIndex int var moduleType, moduleName string segments = strings.Split(strings.TrimPrefix(sshPath, "/"), "/") for i, segment := range segments { var err error segments[i], err = url.PathUnescape(segment) if err != nil { return []string{}, "", 0, "", false, "", "", 0, err } } if segments[0] == ":" { return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath } sepIndex = -1 for i, part := range segments { if part == ":" { sepIndex = i break } } if segments[len(segments)-1] == "" { segments = segments[:len(segments)-1] } switch { case sepIndex == -1: return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath case len(segments) <= sepIndex+2: return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath } groupPath = segments[:sepIndex] moduleType = segments[sepIndex+1] moduleName = segments[sepIndex+2] repoName = moduleName switch moduleType { case "repos": _1, _2, _3, _4, _5, _6, _7 := getRepoInfo(ctx, groupPath, moduleName, sshPubkey) return groupPath, repoName, _1, _2, _3, _4, _5, _6, _7 default: return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath } } func writeRedError(w io.Writer, format string, args ...any) { fmt.Fprintln(w, ansiec.Red+fmt.Sprintf(format, args...)+ansiec.Reset) }
/* * SPDX-License-Identifier: MIT AND BSD-2-Clause
* SPDX-FileContributor: Pygments and Chroma authors
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 Pygments and Chroma authors
*/ @media (prefers-color-scheme: light) { /* Background */ .bg { ; } /* PreWrapper */ .chroma { ; } /* Error */ .chroma .err { } /* LineLink */ .chroma .lnlinks { outline: none; text-decoration: none; color: inherit } /* LineTableTD */ .chroma .lntd { vertical-align: top; padding: 0; margin: 0; border: 0; } /* LineTable */ .chroma .lntable { border-spacing: 0; padding: 0; margin: 0; border: 0; } /* LineHighlight */ .chroma .hl { background-color: #e5e5e5 } /* LineNumbersTable */ .chroma .lnt { white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;color: #7f7f7f } /* LineNumbers */ .chroma .ln { white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;color: #7f7f7f } /* Line */ .chroma .line { display: flex; } /* Keyword */ .chroma .k { color: #008000; font-weight: bold } /* KeywordConstant */ .chroma .kc { color: #008000; font-weight: bold } /* KeywordDeclaration */ .chroma .kd { color: #008000; font-weight: bold } /* KeywordNamespace */ .chroma .kn { color: #008000; font-weight: bold } /* KeywordPseudo */ .chroma .kp { color: #008000 } /* KeywordReserved */ .chroma .kr { color: #008000; font-weight: bold } /* KeywordType */ .chroma .kt { color: #b00040 } /* NameAttribute */ .chroma .na { color: #7d9029 } /* NameBuiltin */ .chroma .nb { color: #008000 } /* NameClass */ .chroma .nc { color: #0000ff; font-weight: bold } /* NameConstant */ .chroma .no { color: #880000 } /* NameDecorator */ .chroma .nd { color: #aa22ff } /* NameEntity */ .chroma .ni { color: #999999; font-weight: bold } /* NameException */ .chroma .ne { color: #d2413a; font-weight: bold } /* NameFunction */ .chroma .nf { color: #0000ff } /* NameLabel */ .chroma .nl { color: #a0a000 } /* NameNamespace */ .chroma .nn { color: #0000ff; font-weight: bold } /* NameTag */ .chroma .nt { color: #008000; font-weight: bold } /* NameVariable */ .chroma .nv { color: #19177c } /* LiteralString */ .chroma .s { color: #ba2121 } /* LiteralStringAffix */ .chroma .sa { color: #ba2121 } /* LiteralStringBacktick */ .chroma .sb { color: #ba2121 } /* LiteralStringChar */ .chroma .sc { color: #ba2121 } /* LiteralStringDelimiter */ .chroma .dl { color: #ba2121 } /* LiteralStringDoc */ .chroma .sd { color: #ba2121; font-style: italic } /* LiteralStringDouble */ .chroma .s2 { color: #ba2121 } /* LiteralStringEscape */ .chroma .se { color: #bb6622; font-weight: bold } /* LiteralStringHeredoc */ .chroma .sh { color: #ba2121 } /* LiteralStringInterpol */ .chroma .si { color: #bb6688; font-weight: bold } /* LiteralStringOther */ .chroma .sx { color: #008000 } /* LiteralStringRegex */ .chroma .sr { color: #bb6688 } /* LiteralStringSingle */ .chroma .s1 { color: #ba2121 } /* LiteralStringSymbol */ .chroma .ss { color: #19177c } /* LiteralNumber */ .chroma .m { color: #666666 } /* LiteralNumberBin */ .chroma .mb { color: #666666 } /* LiteralNumberFloat */ .chroma .mf { color: #666666 } /* LiteralNumberHex */ .chroma .mh { color: #666666 } /* LiteralNumberInteger */ .chroma .mi { color: #666666 } /* LiteralNumberIntegerLong */ .chroma .il { color: #666666 } /* LiteralNumberOct */ .chroma .mo { color: #666666 } /* Operator */ .chroma .o { color: #666666 } /* OperatorWord */ .chroma .ow { color: #aa22ff; font-weight: bold } /* Comment */ .chroma .c { color: #408080; font-style: italic } /* CommentHashbang */ .chroma .ch { color: #408080; font-style: italic } /* CommentMultiline */ .chroma .cm { color: #408080; font-style: italic } /* CommentSingle */ 
.chroma .c1 { color: #408080; font-style: italic } /* CommentSpecial */ .chroma .cs { color: #408080; font-style: italic } /* CommentPreproc */ .chroma .cp { color: #bc7a00 } /* CommentPreprocFile */ .chroma .cpf { color: #bc7a00 } /* GenericDeleted */ .chroma .gd { color: #a00000 } /* GenericEmph */ .chroma .ge { font-style: italic } /* GenericError */ .chroma .gr { color: #ff0000 } /* GenericHeading */ .chroma .gh { color: #000080; font-weight: bold } /* GenericInserted */ .chroma .gi { color: #00a000 } /* GenericOutput */ .chroma .go { color: #888888 } /* GenericPrompt */ .chroma .gp { color: #000080; font-weight: bold } /* GenericStrong */ .chroma .gs { font-weight: bold } /* GenericSubheading */ .chroma .gu { color: #800080; font-weight: bold } /* GenericTraceback */ .chroma .gt { color: #0044dd } /* GenericUnderline */ .chroma .gl { text-decoration: underline } /* TextWhitespace */ .chroma .w { color: #bbbbbb } } @media (prefers-color-scheme: dark) { /* Background */ .bg { color: #e6edf3; background-color: #000000; } /* PreWrapper */ .chroma { color: #e6edf3; background-color: #000000; } /* Error */ .chroma .err { color: #f85149 } /* LineLink */ .chroma .lnlinks { outline: none; text-decoration: none; color: inherit } /* LineTableTD */ .chroma .lntd { vertical-align: top; padding: 0; margin: 0; border: 0; } /* LineTable */ .chroma .lntable { border-spacing: 0; padding: 0; margin: 0; border: 0; } /* LineHighlight */ .chroma .hl { background-color: #6e7681 } /* LineNumbersTable */ .chroma .lnt { white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;color: #737679 } /* LineNumbers */ .chroma .ln { white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;color: #6e7681 } /* Line */ .chroma .line { display: flex; } /* Keyword */ .chroma .k { color: #ff7b72 } /* KeywordConstant */ .chroma .kc { color: #79c0ff } /* KeywordDeclaration */ .chroma .kd { color: #ff7b72 } /* KeywordNamespace */ .chroma .kn { color: #ff7b72 } /* KeywordPseudo */ .chroma .kp { color: #79c0ff } /* KeywordReserved */ .chroma .kr { color: #ff7b72 } /* KeywordType */ .chroma .kt { color: #ff7b72 } /* NameClass */ .chroma .nc { color: #f0883e; font-weight: bold } /* NameConstant */ .chroma .no { color: #79c0ff; font-weight: bold } /* NameDecorator */ .chroma .nd { color: #d2a8ff; font-weight: bold } /* NameEntity */ .chroma .ni { color: #ffa657 } /* NameException */ .chroma .ne { color: #f0883e; font-weight: bold } /* NameFunction */ .chroma .nf { color: #d2a8ff; font-weight: bold } /* NameLabel */ .chroma .nl { color: #79c0ff; font-weight: bold } /* NameNamespace */ .chroma .nn { color: #ff7b72 } /* NameProperty */ .chroma .py { color: #79c0ff } /* NameTag */ .chroma .nt { color: #7ee787 } /* NameVariable */ .chroma .nv { color: #79c0ff } /* Literal */ .chroma .l { color: #a5d6ff } /* LiteralDate */ .chroma .ld { color: #79c0ff } /* LiteralString */ .chroma .s { color: #a5d6ff } /* LiteralStringAffix */ .chroma .sa { color: #79c0ff } /* LiteralStringBacktick */ .chroma .sb { color: #a5d6ff } /* LiteralStringChar */ .chroma .sc { color: #a5d6ff } /* LiteralStringDelimiter */ .chroma .dl { color: #79c0ff } /* LiteralStringDoc */ .chroma .sd { color: #a5d6ff } /* LiteralStringDouble */ .chroma .s2 { color: #a5d6ff } /* LiteralStringEscape */ .chroma .se { color: #79c0ff } /* LiteralStringHeredoc */ .chroma .sh { color: #79c0ff } /* LiteralStringInterpol */ .chroma .si { color: #a5d6ff } /* LiteralStringOther */ 
.chroma .sx { color: #a5d6ff } /* LiteralStringRegex */ .chroma .sr { color: #79c0ff } /* LiteralStringSingle */ .chroma .s1 { color: #a5d6ff } /* LiteralStringSymbol */ .chroma .ss { color: #a5d6ff } /* LiteralNumber */ .chroma .m { color: #a5d6ff } /* LiteralNumberBin */ .chroma .mb { color: #a5d6ff } /* LiteralNumberFloat */ .chroma .mf { color: #a5d6ff } /* LiteralNumberHex */ .chroma .mh { color: #a5d6ff } /* LiteralNumberInteger */ .chroma .mi { color: #a5d6ff } /* LiteralNumberIntegerLong */ .chroma .il { color: #a5d6ff } /* LiteralNumberOct */ .chroma .mo { color: #a5d6ff } /* Operator */ .chroma .o { color: #ff7b72; font-weight: bold } /* OperatorWord */ .chroma .ow { color: #ff7b72; font-weight: bold } /* Comment */ .chroma .c { color: #8b949e; font-style: italic } /* CommentHashbang */ .chroma .ch { color: #8b949e; font-style: italic } /* CommentMultiline */ .chroma .cm { color: #8b949e; font-style: italic } /* CommentSingle */ .chroma .c1 { color: #8b949e; font-style: italic } /* CommentSpecial */ .chroma .cs { color: #8b949e; font-weight: bold; font-style: italic } /* CommentPreproc */ .chroma .cp { color: #8b949e; font-weight: bold; font-style: italic } /* CommentPreprocFile */ .chroma .cpf { color: #8b949e; font-weight: bold; font-style: italic } /* GenericDeleted */ .chroma .gd { color: #ffa198; background-color: #490202 } /* GenericEmph */ .chroma .ge { font-style: italic } /* GenericError */ .chroma .gr { color: #ffa198 } /* GenericHeading */ .chroma .gh { color: #79c0ff; font-weight: bold } /* GenericInserted */ .chroma .gi { color: #56d364; background-color: #0f5323 } /* GenericOutput */ .chroma .go { color: #8b949e } /* GenericPrompt */ .chroma .gp { color: #8b949e } /* GenericStrong */ .chroma .gs { font-weight: bold } /* GenericSubheading */ .chroma .gu { color: #79c0ff } /* GenericTraceback */ .chroma .gt { color: #ff7b72 } /* GenericUnderline */ .chroma .gl { text-decoration: underline } /* TextWhitespace */ .chroma .w { color: #6e7681 } }
/* * SPDX-License-Identifier: AGPL-3.0-only
* SPDX-FileContributor: Runxi Yu <https://runxiyu.org> * SPDX-FileContributor: luk3yx <https://luk3yx.github.io> * SPDX-FileContributor: Drew DeVault <https://drewdevault.com>
* SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org> * SPDX-FileCopyrightText: Copyright (c) 2025 luk3yx <https://luk3yx.github.io> * SPDX-FileCopyrightText: Copyright (c) 2017-2025 Drew DeVault <https://drewdevault.com>
* * Drew did not directly contribute here but we took significant portions of * SourceHut's CSS. */ * { box-sizing: border-box; } /* Base styles and variables */ html { font-family: sans-serif; background-color: var(--background-color); color: var(--text-color); font-size: 1rem; --background-color: hsl(0, 0%, 100%); --text-color: hsl(0, 0%, 0%); --link-color: hsl(320, 50%, 36%); --light-text-color: hsl(0, 0%, 45%); --darker-border-color: hsl(0, 0%, 72%); --lighter-border-color: hsl(0, 0%, 85%); --text-decoration-color: hsl(0, 0%, 72%); --darker-box-background-color: hsl(0, 0%, 92%); --lighter-box-background-color: hsl(0, 0%, 95%); --primary-color: hsl(320, 50%, 36%); --primary-color-contrast: hsl(320, 0%, 100%); --danger-color: #ff0000; --danger-color-contrast: #ffffff; } /* Dark mode overrides */ @media (prefers-color-scheme: dark) { html { --background-color: hsl(0, 0%, 0%); --text-color: hsl(0, 0%, 100%); --link-color: hsl(320, 50%, 76%); --light-text-color: hsl(0, 0%, 78%); --darker-border-color: hsl(0, 0%, 35%); --lighter-border-color: hsl(0, 0%, 25%); --text-decoration-color: hsl(0, 0%, 30%); --darker-box-background-color: hsl(0, 0%, 20%); --lighter-box-background-color: hsl(0, 0%, 15%); } } /* Global layout */ body { margin: 0; } html, code, pre { font-size: 0.96rem; /* TODO: Not always correct */ } /* Toggle table controls */ .toggle-table-off, .toggle-table-on { opacity: 0; position: absolute; } .toggle-table-off:focus-visible + table > thead > tr > th > label, .toggle-table-on:focus-visible + table > thead > tr > th > label { outline: 1.5px var(--primary-color) solid; } .toggle-table-off + table > thead > tr > th, .toggle-table-on + table > thead > tr > th { padding: 0; } .toggle-table-off + table > thead > tr > th > label, .toggle-table-on + table > thead > tr > th > label { width: 100%; display: inline-block; padding: 3px 0; cursor: pointer; } .toggle-table-off:checked + table > tbody { display: none; } .toggle-table-on + table > tbody { display: none; } .toggle-table-on:checked + table > tbody { display: table-row-group; } /* Footer styles */ footer { margin-top: 1rem; margin-left: auto; margin-right: auto; display: block; padding: 0 5px; width: fit-content; text-align: center; color: var(--light-text-color); } footer a:link, footer a:visited { color: inherit; } .padding { padding: 0 1rem; } /* Link styles */ a:link, a:visited { text-decoration-color: var(--text-decoration-color); color: var(--link-color); } /* Readme inline code styling */ #readme code:not(pre > code) { background-color: var(--lighter-box-background-color); border-radius: 2px; padding: 2px; } /* Readme word breaks to avoid overfull hboxes */ #readme { word-break: break-word; } /* Table styles */ table { border: var(--lighter-border-color) solid 1px; border-spacing: 0px; border-collapse: collapse; } table.wide { width: 100%; } td, th { padding: 3px 5px; border: var(--lighter-border-color) solid 1px; } .pad { padding: 3px 5px; } th, thead, tfoot { background-color: var(--lighter-box-background-color); } th[scope=row] { text-align: left; } th { font-weight: normal; } tr.title-row > th, th.title-row, .title-row { background-color: var(--lighter-box-background-color); font-weight: bold; } td > pre { margin: 0; } #readme > *:last-child { margin-bottom: 0; } #readme > *:first-child { margin-top: 0; } /* Table misc and scrolling */ .commit-id { font-family: monospace; word-break: break-word; } .scroll { overflow-x: auto; } /* Diff/chunk styles */ .chunk-unchanged { color: grey; } .chunk-addition { color: green; } 
@media (prefers-color-scheme: dark) { .chunk-addition { color: lime; } } .chunk-deletion { color: red; } .chunk-unknown { color: yellow; } pre.chunk { margin-top: 0; margin-bottom: 0; } .centering { text-align: center; } /* Toggle content sections */ .toggle-off-wrapper, .toggle-on-wrapper { border: var(--lighter-border-color) solid 1px; } .toggle-off-toggle, .toggle-on-toggle { opacity: 0; position: absolute; } .toggle-off-header, .toggle-on-header { font-weight: bold; cursor: pointer; display: block; width: 100%; background-color: var(--lighter-box-background-color); } .toggle-off-header > div, .toggle-on-header > div { padding: 3px 5px; display: block; } .toggle-on-content { display: none; } .toggle-on-toggle:focus-visible + .toggle-on-header, .toggle-off-toggle:focus-visible + .toggle-off-header { outline: 1.5px var(--primary-color) solid; } .toggle-on-toggle:checked + .toggle-on-header + .toggle-on-content { display: block; } .toggle-off-content { display: block; } .toggle-off-toggle:checked + .toggle-off-header + .toggle-off-content { display: none; } *:focus-visible { outline: 1.5px var(--primary-color) solid; } /* File display styles */ .file-patch + .file-patch { margin-top: 0.5rem; } .file-content { padding: 3px 5px; } .file-header { font-family: monospace; display: flex; flex-direction: row; align-items: center; } .file-header::after { content: "\25b6"; font-family: sans-serif; margin-left: auto; line-height: 100%; margin-right: 0.25em; } .file-toggle:checked + .file-header::after { content: "\25bc"; } /* Form elements */ textarea { box-sizing: border-box; background-color: var(--lighter-box-background-color); resize: vertical; } textarea, input[type=text], input[type=password] { font-family: sans-serif; background-color: var(--lighter-box-background-color); color: var(--text-color); border: none; padding: 0.3rem; width: 100%; box-sizing: border-box; } td.tdinput, th.tdinput { padding: 0; position: relative; } td.tdinput textarea, td.tdinput input[type=text], td.tdinput input[type=password], th.tdinput textarea, th.tdinput input[type=text], th.tdinput input[type=password] { background-color: transparent; } td.tdinput select { position: absolute; background-color: var(--background-color); border: none; /* width: 100%; height: 100%; */ box-sizing: border-box; top: 0; left: 0; right: 0; bottom: 0; } select:active { outline: 1.5px var(--primary-color) solid; } /* Button styles */ .btn-primary, a.btn-primary { background: var(--primary-color); color: var(--primary-color-contrast); border: var(--lighter-border-color) 1px solid; font-weight: bold; } .btn-danger, a.btn-danger { background: var(--danger-color); color: var(--danger-color-contrast); border: var(--lighter-border-color) 1px solid; font-weight: bold; } .btn-white, a.btn-white { background: var(--primary-color-contrast); color: var(--primary-color); border: var(--lighter-border-color) 1px solid; } .btn-normal, a.btn-normal, input[type=file]::file-selector-button { background: var(--lighter-box-background-color); border: var(--lighter-border-color) 1px solid !important; color: var(--text-color); } .btn, .btn-white, .btn-danger, .btn-normal, .btn-primary, input[type=submit], input[type=file]::file-selector-button { display: inline-block; width: auto; min-width: fit-content; padding: .1rem .75rem; transition: background .1s linear; cursor: pointer; } a.btn, a.btn-white, a.btn-danger, a.btn-normal, a.btn-primary { text-decoration: none; } /* Header layout */ header#main-header { /* background-color: 
var(--lighter-box-background-color); */ display: flex; flex-direction: row; align-items: center; justify-content: space-between; flex-wrap: wrap; padding-top: 1rem; padding-bottom: 1rem; gap: 0.5rem; } #main-header a, #main-header a:link, main-header a:visited { text-decoration: none; color: inherit; } #main-header-forge-title { white-space: nowrap; } #breadcrumb-nav { display: flex; align-items: center; flex: 1 1 auto; min-width: 0; overflow-x: auto; gap: 0.25rem; white-space: nowrap; } .breadcrumb-separator { margin: 0 0.25rem; } #main-header-user { display: flex; align-items: center; white-space: nowrap; } @media (max-width: 37.5rem) { header#main-header { flex-direction: column; align-items: flex-start; } #breadcrumb-nav { width: 100%; overflow-x: auto; } } /* Uncategorized */ table + table { margin-top: 1rem; } td > ul { padding-left: 1.5rem; margin-top: 0; margin-bottom: 0; } .complete-error-page { font-family: 'Comic Sans MS', 'Chalkboard SE', 'Comic Neue', sans-serif; } .complete-error-page hr { border: 0; border-bottom: 1px dashed; } .key-val-grid { display: grid; grid-template-columns: auto 1fr; gap: 0; border: var(--lighter-border-color) 1px solid; overflow: auto; } .key-val-grid > .title-row { grid-column: 1 / -1; background-color: var(--lighter-box-background-color); font-weight: bold; padding: 3px 5px; border-bottom: var(--lighter-border-color) 1px solid; } .key-val-grid > .row-label { background-color: var(--lighter-box-background-color); padding: 3px 5px; border-bottom: var(--lighter-border-color) 1px solid; border-right: var(--lighter-border-color) 1px solid; text-align: left; font-weight: normal; } .key-val-grid > .row-value { padding: 3px 5px; border-bottom: var(--lighter-border-color) 1px solid; word-break: break-word; } .key-val-grid code { font-family: monospace; } .key-val-grid ul { margin: 0; padding-left: 1.5rem; } .key-val-grid > .row-label:nth-last-of-type(2), .key-val-grid > .row-value:last-of-type { border-bottom: none; } @media (max-width: 37.5rem) { .key-val-grid { grid-template-columns: 1fr; } .key-val-grid > .row-label { border-right: none; } } .key-val-grid > .title-row { grid-column: 1 / -1; background-color: var(--lighter-box-background-color); font-weight: bold; padding: 3px 5px; border-bottom: var(--lighter-border-color) 1px solid; margin: 0; text-align: center; } .key-val-grid-wrapper { max-width: 100%; width: fit-content; } /* Tab navigation */ .nav-tabs-standalone { border: none; list-style: none; margin: 0; flex-grow: 1; display: inline-flex; flex-wrap: nowrap; padding: 0; border-bottom: 0.25rem var(--darker-box-background-color) solid; width: 100%; max-width: 100%; min-width: 100%; } .nav-tabs-standalone > li { align-self: flex-end; } .nav-tabs-standalone > li > a { padding: 0 1rem; } .nav-item a.active { background-color: var(--darker-box-background-color); } .nav-item a, .nav-item a:link, .nav-item a:visited { text-decoration: none; color: inherit; } .repo-header-extension { margin-bottom: 1rem; background-color: var(--darker-box-background-color); } .repo-header > h2 { display: inline; margin: 0; padding-right: 1rem; } .repo-header > .nav-tabs-standalone { border: none; margin: 0; flex-grow: 1; display: inline-flex; flex-wrap: nowrap; padding: 0; } .repo-header { display: flex; flex-wrap: nowrap; } .repo-header-extension-content { padding-top: 0.3rem; padding-bottom: 0.2rem; } .repo-header, .padding-wrapper, .repo-header-extension-content, #main-header, .readingwidth { padding-left: 1rem; padding-right: 1rem; max-width: 60rem; width: 100%; 
margin-left: auto; margin-right: auto; } .padding-wrapper { margin-bottom: 1rem; }
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "400" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>400 Bad Request – {{ .global.forge_title }}</title> </head> <body class="400"> {{- template "header" . -}} <div class="padding-wrapper complete-error-page"> <h1>400 Bad Request</h1> <p>{{- .complete_error_msg -}}</p> <hr /> <address>Lindenii Forge</address> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "403" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>403 Forbidden – {{ .global.forge_title }}</title> </head> <body class="403"> {{- template "header" . -}} <div class="padding-wrapper complete-error-page"> <h1>403 Forbidden</h1> <p>{{- .complete_error_msg -}}</p> <hr /> <address>Lindenii Forge</address> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "404" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>404 Not Found – {{ .global.forge_title }}</title> </head> <body class="404"> {{- template "header" . -}} <div class="padding-wrapper complete-error-page"> <h1>404 Not Found</h1> <hr /> <address>Lindenii Forge</address> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "451" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>451 Unavailable For Legal Reasons – {{ .global.forge_title }}</title> </head> <body class="451"> {{- template "header" . -}} <div class="padding-wrapper complete-error-page"> <h1>451 Unavailable For Legal Reasons</h1> <p>{{- .complete_error_msg -}}</p> <hr /> <address>Lindenii Forge</address> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "500" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>500 Internal Server Error – {{ .global.forge_title }}</title> </head> <body class="500"> {{- template "header" . -}} <div class="padding-wrapper complete-error-page"> <h1>500 Internal Server Error</h1> <p>{{- .complete_error_msg -}}</p> <hr /> <address>Lindenii Forge</address> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "501" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>501 Not Implemented – {{ .global.forge_title }}</title> </head> <body class="501"> {{- template "header" . -}} <div class="padding-wrapper complete-error-page"> <h1>501 Not Implemented</h1> <hr /> <address>Lindenii Forge</address> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "footer" -}} <a href="https://lindenii.runxiyu.org/forge/">Lindenii Forge</a> {{ .global.forge_version }} (<a href="/:/source/source.tar.gz">source</a>, <a href="https://forge.lindenii.runxiyu.org/lindenii/forge/:/repos/server/">upstream</a>, <a href="/:/source/LICENSE">license</a>) {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "group_path_plain" -}} {{- $p := . -}} {{- range $i, $s := . -}}{{- $s -}}{{- if ne $i (minus (len $p) 1) -}}/{{- end -}}{{- end -}} {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "group_view" -}} {{- if .subgroups -}} <table class="wide"> <thead> <tr> <th colspan="2" class="title-row">Subgroups</th> </tr> <tr> <th scope="col">Name</th> <th scope="col">Description</th> </tr> </thead> <tbody> {{- range .subgroups -}} <tr> <td> <a href="{{- .Name | path_escape -}}/">{{- .Name -}}</a> </td> <td> {{- .Description -}} </td> </tr> {{- end -}} </tbody> </table> {{- end -}} {{- if .repos -}} <table class="wide"> <thead> <tr> <th colspan="2" class="title-row">Repos</th> <tr> <th scope="col">Name</th> <th scope="col">Description</th> </tr> </tr> </thead> <tbody> {{- range .repos -}} <tr> <td> <a href=":/repos/{{- .Name | path_escape -}}/">{{- .Name -}}</a> </td> <td> {{- .Description -}} </td> </tr> {{- end -}} </tbody> </table> {{- end -}} {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "head_common" -}} <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1" /> <link rel="stylesheet" href="/:/static/style.css" /> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "header" -}} <header id="main-header"> <div id="main-header-forge-title"> <a href="/">{{- .global.forge_title -}}</a> </div> <nav id="breadcrumb-nav"> {{- $path := "" -}} {{- $url_segments := .url_segments -}} {{- $dir_mode := .dir_mode -}} {{- $ref_type := .ref_type -}} {{- $ref := .ref_name -}} {{- $separator_index := .separator_index -}} {{- if eq $separator_index -1 -}} {{- $separator_index = len $url_segments -}} {{- end -}} {{- range $i := $separator_index -}} {{- $segment := index $url_segments $i -}} {{- $path = printf "%s/%s" $path $segment -}} <span class="breadcrumb-separator">/</span> <a href="{{ $path }}{{ if or (ne $i (minus (len $url_segments) 1)) $dir_mode }}/{{ end }}{{- if $ref_type -}}?{{- $ref_type -}}={{- $ref -}}{{- end -}}">{{ $segment }}</a> {{- end -}} </nav> <div id="main-header-user"> {{- if ne .user_id_string "" -}} <a href="/:/users/{{- .user_id_string -}}">{{- .username -}}</a> {{- else -}} <a href="/:/login/">Login</a> {{- end -}} </div> </header> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "group" -}} {{- $group_path := .group_path -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>{{- range $i, $s := .group_path -}}{{- $s -}}{{- if ne $i (len $group_path) -}}/{{- end -}}{{- end }} – {{ .global.forge_title -}}</title> </head> <body class="group"> {{- template "header" . -}} <div class="padding-wrapper"> {{- if .description -}} <p>{{- .description -}}</p> {{- end -}} {{- template "group_view" . -}} </div> {{- if .direct_access -}} <div class="padding-wrapper"> <form method="POST" enctype="application/x-www-form-urlencoded"> <table> <thead> <tr> <th class="title-row" colspan="2"> Create repo </th> </tr> </thead> <tbody> <tr> <th scope="row">Name</th> <td class="tdinput"> <input id="repo-name-input" name="repo_name" type="text" /> </td> </tr> <tr> <th scope="row">Description</th> <td class="tdinput"> <input id="repo-desc-input" name="repo_desc" type="text" /> </td> </tr> <tr> <th scope="row">Contrib</th> <td class="tdinput"> <select id="repo-contrib-input" name="repo_contrib"> <option value="public">Public</option> <option value="ssh_pubkey">SSH public key</option> <option value="federated">Federated service</option> <option value="registered_user">Registered user</option> <option value="closed">Closed</option> </select> </td> </tr> </tbody> <tfoot> <tr> <td class="th-like" colspan="2"> <div class="flex-justify"> <div class="left"> </div> <div class="right"> <input class="btn-primary" type="submit" value="Create" /> </div> </div> </td> </tr> </tfoot> </table> </form> </div> {{- end -}} <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "index" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>Index – {{ .global.forge_title -}}</title> </head> <body class="index"> {{- template "header" . -}} <div class="padding-wrapper"> <table class="wide"> <thead> <tr> <th colspan="2" class="title-row">Groups</th> </tr> <tr> <th scope="col">Name</th> <th scope="col">Description</th> </tr> </thead> <tbody> {{- range .groups -}} <tr> <td> <a href="{{- .Name | path_escape -}}/">{{- .Name -}}</a> </td> <td> {{- .Description -}} </td> </tr> {{- end -}} </tbody> </table> <table class="wide"> <thead> <tr> <th colspan="2" class="title-row"> Info </th> </tr> </thead> <tbody> <tr> <th scope="row">SSH public key</th> <td><code>{{- .global.server_public_key_string -}}</code></td> </tr> <tr> <th scope="row">SSH fingerprint</th> <td><code>{{- .global.server_public_key_fingerprint -}}</code></td> </tr> <tr> <th scope="row">Memory usage</th> <td>Allocated {{ .mem }} <a href="/:/gc/" class="btn btn-danger">Run the garbage collector</a></td> </tr> </tbody> </table> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "login" -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>Login – {{ .global.forge_title -}}</title> </head> <body class="index"> {{- .login_error -}} <div class="padding-wrapper"> <form method="POST" enctype="application/x-www-form-urlencoded"> <table> <thead> <tr> <th class="title-row" colspan="2"> Password authentication </th> </tr> </thead> <tbody> <tr> <th scope="row">Username</th> <td class="tdinput"> <input id="usernameinput" name="username" type="text" /> </td> </tr> <tr> <th scope="row">Password</th> <td class="tdinput"> <input id="passwordinput" name="password" type="password" /> </td> </tr> </tbody> <tfoot> <tr> <td class="th-like" colspan="2"> <div class="flex-justify"> <div class="left"> </div> <div class="right"> <input class="btn-primary" type="submit" value="Submit" /> </div> </div> </td> </tr> </tfoot> </table> </form> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_branches" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>{{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-branches"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link active" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding-wrapper"> <table id="branches"> <thead> <tr class="title-row"> <th colspan="1">Branches</th> </tr> </thead> <tbody> {{- range .branches -}} <tr> <td> <a href="./?branch={{ . }}">{{ . }}</a> </td> </tr> {{- end -}} </tbody> </table> </div> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_commit" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>Commit {{ .commit_id }} – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-commit"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding-wrapper scroll"> <div class="key-val-grid-wrapper"> <section id="commit-info" class="key-val-grid"> <div class="title-row">Commit info</div> <div class="row-label">ID</div> <div class="row-value">{{- .commit_id -}}</div> <div class="row-label">Author</div> <div class="row-value"> <span>{{- .commit_object.Author.Name -}}</span> <span><<a href="mailto:{{- .commit_object.Author.Email -}}">{{- .commit_object.Author.Email -}}</a>></span> </div> <div class="row-label">Author date</div> <div class="row-value">{{- .commit_object.Author.When.Format "Mon, 02 Jan 2006 15:04:05 -0700" -}}</div> <div class="row-label">Committer</div> <div class="row-value"> <span>{{- .commit_object.Committer.Name -}}</span> <span><<a href="mailto:{{- .commit_object.Committer.Email -}}">{{- .commit_object.Committer.Email -}}</a>></span> </div> <div class="row-label">Committer date</div> <div class="row-value">{{- .commit_object.Committer.When.Format "Mon, 02 Jan 2006 15:04:05 -0700" -}}</div> <div class="row-label">Actions</div> <div class="row-value"> <a href="{{- .commit_object.Hash -}}.patch">Get patch</a> </div> </section> </div> </div> <div class="padding-wrapper scroll" id="this-commit-message"> <pre>{{- .commit_object.Message -}}</pre> </div> <div class="padding-wrapper"> {{- $parent_commit_hash := .parent_commit_hash -}} {{- $commit_object := .commit_object -}} {{- range .file_patches -}} <div class="file-patch toggle-on-wrapper"> <input type="checkbox" id="toggle-{{- .From.Hash -}}{{- .To.Hash -}}" class="file-toggle toggle-on-toggle"> <label for="toggle-{{- .From.Hash -}}{{- .To.Hash -}}" class="file-header toggle-on-header"> <div> {{- if eq .From.Path "" -}} --- /dev/null {{- else -}} --- a/<a href="../tree/{{- .From.Path -}}?commit={{- $parent_commit_hash -}}">{{- .From.Path -}}</a> {{ .From.Mode -}} {{- end -}} <br /> {{- if eq .To.Path "" -}} +++ /dev/null {{- else -}} +++ b/<a href="../tree/{{- .To.Path -}}?commit={{- $commit_object.Hash -}}">{{- .To.Path -}}</a> {{ .To.Mode -}} {{- end -}} </div> </label> <div class="file-content toggle-on-content scroll"> {{- range .Chunks -}} {{- if eq .Operation 0 -}} <pre class="chunk chunk-unchanged">{{ .Content }}</pre> {{- else if eq .Operation 1 -}} <pre class="chunk chunk-addition">{{ .Content }}</pre> {{- else if eq .Operation 2 -}} <pre class="chunk chunk-deletion">{{ .Content 
}}</pre> {{- else -}} <pre class="chunk chunk-unknown">{{ .Content }}</pre> {{- end -}} {{- end -}} </div> </div> {{- end -}} </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
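The commit template reads .file_patches entries with .From/.To (Path, Mode, Hash) and .Chunks whose .Operation is compared against 0, 1, and 2. A sketch of plain structs with that shape; the field names come from the template, the concrete types are assumptions.

package main

import "fmt"

// Shapes matching what the commit template reads.
type diffFile struct {
	Path string
	Mode string
	Hash string
}

type diffChunk struct {
	Operation int // 0 = unchanged, 1 = addition, 2 = deletion (as the template branches)
	Content   string
}

type filePatch struct {
	From   diffFile
	To     diffFile
	Chunks []diffChunk
}

func main() {
	p := filePatch{
		From:   diffFile{Path: "old.txt", Mode: "100644"},
		To:     diffFile{Path: "new.txt", Mode: "100644"},
		Chunks: []diffChunk{{Operation: 1, Content: "added line\n"}},
	}
	fmt.Printf("%+v\n", p)
}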
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_contrib_index" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>Merge requests – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-contrib-index"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link active" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding-wrapper"> <table id="recent-merge_requests" class="wide"> <thead> <tr class="title-row"> <th colspan="3">Merge requests</th> </tr> </thead> <tr> <th scope="col">Name</th> <th scope="col">Description</th> <th scope="col">Status</th> </tr> <tbody> {{- range .merge_requests -}} <tr> <td class="merge_request-id">{{- .ID -}}</td> <td class="merge_request-title"><a href="{{- .ID -}}/">{{- .Title -}}</a></td> <td class="merge_request-status">{{- .Status -}}</td> </tr> {{- end -}} </tbody> </table> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_contrib_one" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>Merge requests – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-contrib-one"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link active" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding-wrapper"> <table id="mr-info-table"> <thead> <tr class="title-row"> <th colspan="2">Merge request info</th> </tr> </thead> <tbody> <tr> <th scope="row">ID</th> <td>{{- .mr_id -}}</td> </tr> <tr> <th scope="row">Status</th> <td>{{- .mr_status -}}</td> </tr> <tr> <th scope="row">Title</th> <td>{{- .mr_title -}}</td> </tr> <tr> <th scope="row">Source ref</th> <td>{{- .mr_source_ref -}}</td> </tr> <tr> <th scope="row">Destination branch</th> <td>{{- .mr_destination_branch -}}</td> </tr> <tr> <th scope="row">Merge base</th> <td>{{- .merge_base.Hash.String -}}</td> </tr> </tbody> </table> </div> <div class="padding-wrapper"> {{- $merge_base := .merge_base -}} {{- $source_commit := .source_commit -}} {{- range .file_patches -}} <div class="file-patch toggle-on-wrapper"> <input type="checkbox" id="toggle-{{- .From.Hash -}}{{- .To.Hash -}}" class="file-toggle toggle-on-toggle"> <label for="toggle-{{- .From.Hash -}}{{- .To.Hash -}}" class="file-header toggle-on-header"> <div> {{- if eq .From.Path "" -}} --- /dev/null {{- else -}} --- a/<a href="../../tree/{{- .From.Path -}}?commit={{- $merge_base.Hash -}}">{{- .From.Path -}}</a> {{ .From.Mode -}} {{- end -}} <br /> {{- if eq .To.Path "" -}} +++ /dev/null {{- else -}} +++ b/<a href="../../tree/{{- .To.Path -}}?commit={{- $source_commit.Hash -}}">{{- .To.Path -}}</a> {{ .To.Mode -}} {{- end -}} </div> </label> <div class="file-content toggle-on-content scroll"> {{- range .Chunks -}} {{- if eq .Operation 0 -}} <pre class="chunk chunk-unchanged">{{ .Content }}</pre> {{- else if eq .Operation 1 -}} <pre class="chunk chunk-addition">{{ .Content }}</pre> {{- else if eq .Operation 2 -}} <pre class="chunk chunk-deletion">{{ .Content }}</pre> {{- else -}} <pre class="chunk chunk-unknown">{{ .Content }}</pre> {{- end -}} {{- end -}} </div> </div> {{- end -}} </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_index" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>{{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-index"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link active" href="./{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link" href="tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> {{- if .notes -}} <div id="notes">Notes</div> <ul> {{- range .notes -}}<li>{{- . -}}</li>{{- end -}} </ul> </div> {{- end -}} <p class="readingwidth"><code>{{- .ssh_clone_url -}}</code></p> {{- if .commits -}} <div class="padding-wrapper scroll"> <table id="recent-commits" class="wide"> <thead> <tr class="title-row"> <th colspan="3">Recent commits (<a href="log/{{- template "ref_query" $root -}}">see all</a>)</th> </tr> <tr> <th scope="col">Title</th> <th scope="col">Author</th> <th scope="col">Author date</th> </tr> </thead> <tbody> {{- range .commits -}} <tr> <td class="commit-title"><a href="commit/{{- .Hash -}}">{{- .Message | first_line -}}</a></td> <td class="commit-author"> <a class="email-name" href="mailto:{{- .Author.Email -}}">{{- .Author.Name -}}</a> </td> <td class="commit-time"> {{- .Author.When.Format "2006-01-02 15:04:05 -0700" -}} </td> </tr> {{- end -}} {{- if dereference_error .commits_err -}} Error while obtaining commit log: {{ .commits_err }} {{- end -}} </tbody> </table> </div> {{- end -}} {{- if .readme -}} <div class="padding-wrapper" id="readme"> {{- .readme -}} </div> {{- end -}} <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_log" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>Log – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-log"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link active" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="scroll"> <table id="commits" class="wide"> <thead> <tr class="title-row"> <th colspan="4">Commits {{ if .ref_name }} on {{ .ref_name }}{{ end -}}</th> </tr> <tr> <th scope="col">ID</th> <th scope="col">Title</th> <th scope="col">Author</th> <th scope="col">Author date</th> </tr> </thead> <tbody> {{- range .commits -}} <tr> <td class="commit-id"><a href="../commit/{{- .Hash -}}">{{- .Hash -}}</a></td> <td class="commit-title">{{- .Message | first_line -}}</td> <td class="commit-author"> <a class="email-name" href="mailto:{{- .Author.Email -}}">{{- .Author.Name -}}</a> </td> <td class="commit-time"> {{- .Author.When.Format "2006-01-02 15:04:05 -0700" -}} </td> </tr> {{- end -}} {{- if dereference_error .commits_err -}} Error while obtaining commit log: {{ .commits_err }} {{- end -}} </tbody> </table> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_raw_dir" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>/{{ .path_spec }}{{ if ne .path_spec "" }}/{{ end }} – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-raw-dir"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link active" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding-wrapper scroll"> <table id="file-tree" class="wide"> <thead> <tr class="title-row"> <th colspan="3"> (Raw) /{{ .path_spec }}{{ if ne .path_spec "" }}/{{ end }}{{ if .ref_name }} on {{ .ref_name }}{{ end -}} </th> </tr> <tr> <th scope="col">Mode</th> <th scope="col">Filename</th> <th scope="col">Size</th> </tr> </thead> <tbody> {{- $path_spec := .path_spec -}} {{- range .files -}} <tr> <td class="file-mode">{{- .Mode -}}</td> <td class="file-name"><a href="{{- .Name -}}{{- if not .IsFile -}}/{{- end -}}{{- template "ref_query" $root -}}">{{- .Name -}}</a>{{- if not .IsFile -}}/{{- end -}}</td> <td class="file-size">{{- .Size -}}</td> </tr> {{- end -}} </tbody> </table> </div> <div class="padding-wrapper"> <div id="refs"> </div> </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_tree_dir" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <title>/{{ .path_spec }}{{ if ne .path_spec "" }}/{{ end }} – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-tree-dir"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link active" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding-wrapper scroll"> <table id="file-tree" class="wide"> <thead> <tr class="title-row"> <th colspan="3"> /{{ .path_spec }}{{ if ne .path_spec "" }}/{{ end }}{{ if .ref_name }} on {{ .ref_name }}{{ end -}} </th> <tr> <th scope="col">Mode</th> <th scope="col">Filename</th> <th scope="col">Size</th> </tr> </tr> </thead> <tbody> {{- $path_spec := .path_spec -}} {{- range .files -}} <tr> <td class="file-mode">{{- .Mode -}}</td> <td class="file-name"><a href="{{- .Name -}}{{- if not .IsFile -}}/{{- end -}}{{- template "ref_query" $root -}}">{{- .Name -}}</a>{{- if not .IsFile -}}/{{- end -}}</td> <td class="file-size">{{- .Size -}}</td> </tr> {{- end -}} </tbody> </table> </div> <div class="padding-wrapper"> <div id="refs"> </div> </div> {{- if .readme -}} <div class="padding-wrapper" id="readme"> {{- .readme -}} </div> {{- end -}} <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
{{/* SPDX-License-Identifier: AGPL-3.0-only
SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}} {{- define "repo_tree_file" -}} {{- $root := . -}} <!DOCTYPE html> <html lang="en"> <head> {{- template "head_common" . -}} <link rel="stylesheet" href="/:/static/chroma.css" /> <title>/{{ .path_spec }} – {{ .repo_name }} – {{ template "group_path_plain" .group_path }} – {{ .global.forge_title -}}</title> </head> <body class="repo-tree-file"> {{- template "header" . -}} <div class="repo-header"> <h2>{{- .repo_name -}}</h2> <ul class="nav-tabs-standalone"> <li class="nav-item"> <a class="nav-link" href="../{{- template "ref_query" $root -}}">Summary</a> </li> <li class="nav-item"> <a class="nav-link active" href="../tree/{{- template "ref_query" $root -}}">Tree</a> </li> <li class="nav-item"> <a class="nav-link" href="../log/{{- template "ref_query" $root -}}">Log</a> </li> <li class="nav-item"> <a class="nav-link" href="../branches/">Branches</a> </li> <li class="nav-item"> <a class="nav-link" href="../tags/">Tags</a> </li> <li class="nav-item"> <a class="nav-link" href="../contrib/">Merge requests</a> </li> <li class="nav-item"> <a class="nav-link" href="../settings/">Settings</a> </li> </ul> </div> <div class="repo-header-extension"> <div class="repo-header-extension-content"> {{- .repo_description -}} </div> </div> <div class="padding"> <p> /{{ .path_spec }} (<a href="/{{ template "group_path_plain" .group_path }}/:/repos/{{ .repo_name }}/raw/{{ .path_spec }}{{- template "ref_query" $root -}}">raw</a>) </p> {{- .file_contents -}} </div> <footer> {{- template "footer" . -}} </footer> </body> </html> {{- end -}}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "errors" "net/http" "net/url" "strings" ) var ( errDupRefSpec = errors.New("duplicate ref spec") errNoRefSpec = errors.New("no ref spec") ) func getParamRefTypeName(request *http.Request) (retRefType, retRefName string, err error) { rawQuery := request.URL.RawQuery queryValues, err := url.ParseQuery(rawQuery) if err != nil { return } done := false for _, refType := range []string{"commit", "branch", "tag"} { refName, ok := queryValues[refType] if ok { if done { err = errDupRefSpec return } done = true if len(refName) != 1 { err = errDupRefSpec return } retRefName = refName[0] retRefType = refType } } if !done { err = errNoRefSpec } return } func parseReqURI(requestURI string) (segments []string, params url.Values, err error) { path, paramsStr, _ := strings.Cut(requestURI, "?") segments = strings.Split(strings.TrimPrefix(path, "/"), "/") for i, segment := range segments { segments[i], err = url.PathUnescape(segment) if err != nil { return } } params, err = url.ParseQuery(paramsStr) return } func redirectDir(writer http.ResponseWriter, request *http.Request) bool { requestURI := request.RequestURI pathEnd := strings.IndexAny(requestURI, "?#") var path, rest string if pathEnd == -1 { path = requestURI } else { path = requestURI[:pathEnd] rest = requestURI[pathEnd:] } if !strings.HasSuffix(path, "/") { http.Redirect(writer, request, path+"/"+rest, http.StatusSeeOther) return true } return false } func redirectNoDir(writer http.ResponseWriter, request *http.Request) bool { requestURI := request.RequestURI pathEnd := strings.IndexAny(requestURI, "?#") var path, rest string if pathEnd == -1 { path = requestURI } else { path = requestURI[:pathEnd] rest = requestURI[pathEnd:] } if strings.HasSuffix(path, "/") { http.Redirect(writer, request, strings.TrimSuffix(path, "/")+rest, http.StatusSeeOther) return true } return false } func redirectUnconditionally(writer http.ResponseWriter, request *http.Request) { requestURI := request.RequestURI pathEnd := strings.IndexAny(requestURI, "?#") var path, rest string if pathEnd == -1 { path = requestURI } else { path = requestURI[:pathEnd] rest = requestURI[pathEnd:] } http.Redirect(writer, request, path+rest, http.StatusSeeOther) } func segmentsToURL(segments []string) string { for i, segment := range segments { segments[i] = url.PathEscape(segment) } return strings.Join(segments, "/") } func anyContain(ss []string, c string) bool { for _, s := range ss { if strings.Contains(s, c) { return true } } return false }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import ( "context" "github.com/jackc/pgx/v5" ) func addUserSSH(ctx context.Context, pubkey string) (userID int, err error) { var txn pgx.Tx if txn, err = database.Begin(ctx); err != nil { return } defer func() { _ = txn.Rollback(ctx) }() if err = txn.QueryRow(ctx, `INSERT INTO users (type) VALUES ('pubkey_only') RETURNING id`).Scan(&userID); err != nil { return } if _, err = txn.Exec(ctx, `INSERT INTO ssh_public_keys (key_string, user_id) VALUES ($1, $2)`, pubkey, userID); err != nil { return } err = txn.Commit(ctx) return }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
package main import "strings" func sliceContainsNewlines(s []string) bool { for _, v := range s { if strings.Contains(v, "\n") { return true } } return false }