Hi… I am well aware that this diff view is very suboptimal. It will be fixed when the refactored server comes along!
Remove underscores from Go code, pt 4
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "context" "github.com/jackc/pgx/v5/pgtype" ) // getRepoInfo returns the filesystem path and direct // access permission for a given repo and a provided ssh public key.
func getRepoInfo(ctx context.Context, group_path []string, repoName, sshPubkey string) (repoID int, fsPath string, access bool, contribReq, userType string, userID int, err error) {
func getRepoInfo(ctx context.Context, groupPath []string, repoName, sshPubkey string) (repoID int, fsPath string, access bool, contribReq, userType string, userID int, err error) {
err = database.QueryRow(ctx, ` WITH RECURSIVE group_path_cte AS ( -- Start: match the first name in the path where parent_group IS NULL SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL -- Recurse: join next segment of the path SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT r.id, r.filesystem_path, CASE WHEN ugr.user_id IS NOT NULL THEN TRUE ELSE FALSE END AS has_role_in_group, r.contrib_requirements, COALESCE(u.type, ''), COALESCE(u.id, 0) FROM group_path_cte g JOIN repos r ON r.group_id = g.id LEFT JOIN ssh_public_keys s ON s.key_string = $3 LEFT JOIN users u ON u.id = s.user_id LEFT JOIN user_group_roles ugr ON ugr.group_id = g.id AND ugr.user_id = u.id WHERE g.depth = cardinality($1::text[]) AND r.name = $2
`, pgtype.FlatArray[string](group_path), repoName, sshPubkey,
`, pgtype.FlatArray[string](groupPath), repoName, sshPubkey,
).Scan(&repoID, &fsPath, &access, &contribReq, &userType, &userID) return }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
package main
import (
"bytes"
"context"
"encoding/binary"
"errors"
"fmt"
"io"
"net"
"os"
"path/filepath"
"strconv"
"strings"
"syscall"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/jackc/pgx/v5"
"go.lindenii.runxiyu.org/lindenii-common/ansiec"
)
var (
	// errGetFD is returned by getUcred when the unix socket's
	// underlying file descriptor cannot be obtained.
	errGetFD = errors.New("unable to get file descriptor")
	// errGetUcred is returned by getUcred when the SO_PEERCRED
	// getsockopt call fails.
	errGetUcred = errors.New("failed getsockopt")
)
// hooksHandler handles a connection from hookc via the
// unix socket.
func hooksHandler(conn net.Conn) {
var ctx context.Context
var cancel context.CancelFunc
var ucred *syscall.Ucred
var err error
var cookie []byte
var packPass packPass
var sshStderr io.Writer
var ok bool
var hookRet byte
defer conn.Close()
ctx, cancel = context.WithCancel(context.Background())
defer cancel()
// There aren't reasonable cases where someone would run this as
// another user.
if ucred, err = getUcred(conn); err != nil {
if _, err = conn.Write([]byte{1}); err != nil {
return
}
writeRedError(conn, "\nUnable to get peer credentials: %v", err)
return
}
if ucred.Uid != uint32(os.Getuid()) {
if _, err = conn.Write([]byte{1}); err != nil {
return
}
writeRedError(conn, "\nUID mismatch")
return
}
cookie = make([]byte, 64)
if _, err = conn.Read(cookie); err != nil {
if _, err = conn.Write([]byte{1}); err != nil {
return
}
writeRedError(conn, "\nFailed to read cookie: %v", err)
return
}
packPass, ok = packPasses.Load(string(cookie))
if !ok {
if _, err = conn.Write([]byte{1}); err != nil {
return
}
writeRedError(conn, "\nInvalid handler cookie")
return
}
sshStderr = packPass.session.Stderr()
_, _ = sshStderr.Write([]byte{'\n'})
hookRet = func() byte {
var argc64 uint64
if err = binary.Read(conn, binary.NativeEndian, &argc64); err != nil {
writeRedError(sshStderr, "Failed to read argc: %v", err)
return 1
}
var args []string
for i := uint64(0); i < argc64; i++ {
var arg bytes.Buffer
for {
b := make([]byte, 1)
n, err := conn.Read(b)
if err != nil || n != 1 {
writeRedError(sshStderr, "Failed to read arg: %v", err)
return 1
}
if b[0] == 0 {
break
}
arg.WriteByte(b[0])
}
args = append(args, arg.String())
}
gitEnv := make(map[string]string)
for {
var envLine bytes.Buffer
for {
b := make([]byte, 1)
n, err := conn.Read(b)
if err != nil || n != 1 {
writeRedError(sshStderr, "Failed to read environment variable: %v", err)
return 1
}
if b[0] == 0 {
break
}
envLine.WriteByte(b[0])
}
if envLine.Len() == 0 {
break
}
kv := envLine.String()
parts := strings.SplitN(kv, "=", 2)
if len(parts) < 2 {
writeRedError(sshStderr, "Invalid environment variable line: %v", kv)
return 1
}
gitEnv[parts[0]] = parts[1]
}
var stdin bytes.Buffer
if _, err = io.Copy(&stdin, conn); err != nil {
writeRedError(conn, "Failed to read to the stdin buffer: %v", err)
}
switch filepath.Base(args[0]) {
case "pre-receive":
if packPass.directAccess {
return 0
} else {
allOK := true
for {
var line, oldOID, rest, newIOID, refName string
var found bool
var oldHash, newHash plumbing.Hash
var oldCommit, newCommit *object.Commit
var pushOptCount int
}
allOK := true
for {
var line, oldOID, rest, newIOID, refName string
var found bool
var oldHash, newHash plumbing.Hash
var oldCommit, newCommit *object.Commit
var pushOptCount int
pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
if err != nil {
writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
if err != nil {
writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
return 1
}
// TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
// Also it'd be nice to be able to combine users or whatever
if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
if pushOptCount == 0 {
writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
return 1 }
// TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
// Also it'd be nice to be able to combine users or whatever
if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
if pushOptCount == 0 {
writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
for i := 0; i < pushOptCount; i++ {
pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", i)]
if !ok {
writeRedError(sshStderr, "Failed to get push option %d", i)
return 1 }
for i := 0; i < pushOptCount; i++ {
pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", i)]
if !ok {
writeRedError(sshStderr, "Failed to get push option %d", i)
if strings.HasPrefix(pushOpt, "fedid=") {
fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
service, username, found := strings.Cut(fedUserID, ":")
if !found {
writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
return 1 }
if strings.HasPrefix(pushOpt, "fedid=") {
fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
service, username, found := strings.Cut(fedUserID, ":")
if !found {
writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
return 1
}
ok, err := fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
if err != nil {
writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
return 1
}
if !ok {
writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
return 1
}
break
ok, err := fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
if err != nil {
writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
return 1
}
if i == pushOptCount-1 {
writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
if !ok {
writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
return 1 }
break
}
if i == pushOptCount-1 {
writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
return 1
} }
}
line, err = stdin.ReadString('\n')
if errors.Is(err, io.EOF) {
break
} else if err != nil {
writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
return 1
}
line = line[:len(line)-1]
line, err = stdin.ReadString('\n')
if errors.Is(err, io.EOF) {
break
} else if err != nil {
writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
return 1
}
line = line[:len(line)-1]
oldOID, rest, found = strings.Cut(line, " ")
if !found {
writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
return 1
}
oldOID, rest, found = strings.Cut(line, " ")
if !found {
writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
return 1
}
newIOID, refName, found = strings.Cut(rest, " ")
if !found {
writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
return 1
}
newIOID, refName, found = strings.Cut(rest, " ")
if !found {
writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
return 1
}
if strings.HasPrefix(refName, "refs/heads/contrib/") {
if allZero(oldOID) { // New branch
fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
var newMRID int
if strings.HasPrefix(refName, "refs/heads/contrib/") {
if allZero(oldOID) { // New branch
fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
var newMRID int
err = database.QueryRow(ctx,
"INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
).Scan(&newMRID)
if err != nil {
writeRedError(sshStderr, "Error creating merge request: %v", err)
return 1
}
fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", generate_http_remote_url(packPass.group_path, packPass.repo_name)+"/contrib/"+strconv.FormatUint(uint64(newMRID), 10)+"/"+ansiec.Reset)
} else { // Existing contrib branch
var existingMRUser int
var isAncestor bool
err = database.QueryRow(ctx,
"SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
).Scan(&existingMRUser)
if err != nil {
if errors.Is(err, pgx.ErrNoRows) {
writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
} else {
writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
}
return 1
}
if existingMRUser == 0 {
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
continue
}
err = database.QueryRow(ctx,
"INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
).Scan(&newMRID)
if err != nil {
writeRedError(sshStderr, "Error creating merge request: %v", err)
return 1
}
fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", genHTTPRemoteURL(packPass.groupPath, packPass.repoName)+"/contrib/"+strconv.FormatUint(uint64(newMRID), 10)+"/"+ansiec.Reset)
} else { // Existing contrib branch
var existingMRUser int
var isAncestor bool
if existingMRUser != packPass.userID {
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs another user's MR)")
continue
err = database.QueryRow(ctx,
"SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
).Scan(&existingMRUser)
if err != nil {
if errors.Is(err, pgx.ErrNoRows) {
writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
} else {
writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
}
return 1
}
if existingMRUser == 0 {
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
continue
}
oldHash = plumbing.NewHash(oldOID)
if existingMRUser != packPass.userID {
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs another user's MR)")
continue
}
if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
return 1
}
oldHash = plumbing.NewHash(oldOID)
// Potential BUG: I'm not sure if new_commit is guaranteed to be
// detectable as they haven't been merged into the main repo's
// objects yet. But it seems to work, and I don't think there's
// any reason for this to only work intermitently.
newHash = plumbing.NewHash(newIOID)
if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
return 1
}
if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
return 1
}
if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
return 1
}
// Potential BUG: I'm not sure if new_commit is guaranteed to be
// detectable as they haven't been merged into the main repo's
// objects yet. But it seems to work, and I don't think there's
// any reason for this to only work intermitently.
newHash = plumbing.NewHash(newIOID)
if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
return 1
}
if !isAncestor {
// TODO: Create MR snapshot ref instead
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
continue
}
if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
return 1
}
fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
if !isAncestor {
// TODO: Create MR snapshot ref instead
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
continue
}
} else { // Non-contrib branch
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
}
} else { // Non-contrib branch
allOK = false
fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
}
}
fmt.Fprintln(sshStderr)
if allOK {
fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
return 0
} else {
fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
return 1
}
fmt.Fprintln(sshStderr)
if allOK {
fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
return 0
} else {
fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
return 1
}
default:
fmt.Fprintln(sshStderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
return 1
}
}()
fmt.Fprintln(sshStderr)
_, _ = conn.Write([]byte{hookRet})
}
// serveGitHooks accepts connections on the hook listener forever,
// dispatching each one to hooksHandler in its own goroutine. It only
// returns when Accept fails, propagating that error to the caller.
func serveGitHooks(listener net.Listener) error {
	for {
		c, acceptErr := listener.Accept()
		if acceptErr != nil {
			return acceptErr
		}
		go hooksHandler(c)
	}
}
func getUcred(conn net.Conn) (ucred *syscall.Ucred, err error) {
var unixConn *net.UnixConn = conn.(*net.UnixConn)
var fd *os.File
if fd, err = unixConn.File(); err != nil {
return nil, errGetFD
}
defer fd.Close()
if ucred, err = syscall.GetsockoptUcred(int(fd.Fd()), syscall.SOL_SOCKET, syscall.SO_PEERCRED); err != nil {
return nil, errGetUcred
}
return ucred, nil
}
// allZero reports whether every character of s is the ASCII digit
// '0'. The empty string is considered all-zero. It is used to detect
// the null object ID in pre-receive lines.
func allZero(s string) bool {
	for i := 0; i < len(s); i++ {
		if s[i] != '0' {
			return false
		}
	}
	return true
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
package main
import (
"context"
"errors"
"io"
"os"
"strings"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/jackc/pgx/v5/pgtype"
)
// openRepo opens a git repository by group and repo name. It resolves
// the group path recursively in the database, fetches the repo's
// filesystem path, description, and ID, then opens the repository
// from disk.
func openRepo(ctx context.Context, groupPath []string, repoName string) (repo *git.Repository, description string, repoID int, err error) {
	var fsPath string
	if err = database.QueryRow(ctx, `
	WITH RECURSIVE group_path_cte AS (
		-- Start: match the first name in the path where parent_group IS NULL
		SELECT
			id,
			parent_group,
			name,
			1 AS depth
		FROM groups
		WHERE name = ($1::text[])[1]
			AND parent_group IS NULL
		UNION ALL
		-- Recurse: join next segment of the path
		SELECT
			g.id,
			g.parent_group,
			g.name,
			group_path_cte.depth + 1
		FROM groups g
		JOIN group_path_cte ON g.parent_group = group_path_cte.id
		WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
			AND group_path_cte.depth + 1 <= cardinality($1::text[])
	)
	SELECT
		r.filesystem_path,
		COALESCE(r.description, ''),
		r.id
	FROM group_path_cte g
	JOIN repos r ON r.group_id = g.id
	WHERE g.depth = cardinality($1::text[])
		AND r.name = $2
	`, pgtype.FlatArray[string](groupPath), repoName).Scan(&fsPath, &description, &repoID); err != nil {
		return
	}
	repo, err = git.PlainOpen(fsPath)
	return
}
// go-git's tree entries are not friendly for use in HTML templates.
// displayTreeEntry is an HTML-template-friendly view of a single git
// tree entry (go-git's tree entries are not directly usable in
// templates).
type displayTreeEntry struct {
	Name      string // entry name with any leading "/" stripped
	Mode      string // os.FileMode-style string, or "x---------" when unparsable
	Size      int64  // blob size in bytes; 0 when unavailable
	IsFile    bool
	IsSubtree bool
}
// makeDisplayTree converts a go-git tree into a slice of
// displayTreeEntry values suitable for HTML templates. Unparsable
// modes render as "x---------" and unknown sizes as 0.
func makeDisplayTree(tree *object.Tree) (displayTree []displayTreeEntry) {
	for _, entry := range tree.Entries {
		var de displayTreeEntry

		osMode, modeErr := entry.Mode.ToOSFileMode()
		if modeErr != nil {
			de.Mode = "x---------"
		} else {
			de.Mode = osMode.String()
		}

		de.IsFile = entry.Mode.IsFile()

		size, sizeErr := tree.Size(entry.Name)
		if sizeErr != nil {
			size = 0
		}
		de.Size = size

		de.Name = strings.TrimPrefix(entry.Name, "/")
		displayTree = append(displayTree, de)
	}
	return displayTree
}
// getRecentCommits walks the commit log starting at headHash and
// returns up to numCommits commits; a negative numCommits means no
// limit. Reaching the end of the log is not an error.
func getRecentCommits(repo *git.Repository, headHash plumbing.Hash, numCommits int) (recentCommits []*object.Commit, err error) {
	var commitIter object.CommitIter
	commitIter, err = repo.Log(&git.LogOptions{From: headHash})
	if err != nil {
		return nil, err
	}
	defer commitIter.Close()

	recentCommits = make([]*object.Commit, 0)
	// A single loop handles both the bounded and unbounded cases.
	for i := 0; numCommits < 0 || i < numCommits; i++ {
		var c *object.Commit
		c, err = commitIter.Next()
		if errors.Is(err, io.EOF) {
			return recentCommits, nil
		}
		if err != nil {
			return nil, err
		}
		recentCommits = append(recentCommits, c)
	}
	return recentCommits, nil
}
// fmtCommitAsPatch renders a commit as a patch against its first
// parent. For a root commit (no parent) the patch is taken against an
// empty tree and parentCommitHash is left as the zero hash.
func fmtCommitAsPatch(commit *object.Commit) (parentCommitHash plumbing.Hash, patch *object.Patch, err error) {
	parentCommit, parentErr := commit.Parent(0)
	switch {
	case errors.Is(parentErr, object.ErrParentNotFound):
		// Root commit: diff the empty tree against the commit's tree.
		var commitTree *object.Tree
		if commitTree, err = commit.Tree(); err != nil {
			return
		}
		if patch, err = (&object.Tree{}).Patch(commitTree); err != nil {
			return
		}
	case parentErr != nil:
		err = parentErr
		return
	default:
		parentCommitHash = parentCommit.Hash
		if patch, err = parentCommit.Patch(commit); err != nil {
			return
		}
	}
	return
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" ) // getRefHash returns the hash of a reference given its // type and name as supplied in URL queries.
func getRefHash(repo *git.Repository, ref_type, ref_name string) (ref_hash plumbing.Hash, err error) {
func getRefHash(repo *git.Repository, refType, refName string) (refHash plumbing.Hash, err error) {
var ref *plumbing.Reference
switch ref_type {
switch refType {
case "":
if ref, err = repo.Head(); err != nil {
return
}
ref_hash = ref.Hash()
refHash = ref.Hash()
case "commit":
ref_hash = plumbing.NewHash(ref_name)
refHash = plumbing.NewHash(refName)
case "branch":
if ref, err = repo.Reference(plumbing.NewBranchReferenceName(ref_name), true); err != nil {
if ref, err = repo.Reference(plumbing.NewBranchReferenceName(refName), true); err != nil {
return }
ref_hash = ref.Hash()
refHash = ref.Hash()
case "tag":
if ref, err = repo.Reference(plumbing.NewTagReferenceName(ref_name), true); err != nil {
if ref, err = repo.Reference(plumbing.NewTagReferenceName(refName), true); err != nil {
return }
ref_hash = ref.Hash()
refHash = ref.Hash()
default:
panic("Invalid ref type " + ref_type)
panic("Invalid ref type " + refType)
} return }
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" )
func get_user_info_from_request(r *http.Request) (id int, username string, err error) {
var session_cookie *http.Cookie
func getUserFromRequest(r *http.Request) (id int, username string, err error) {
var sessionCookie *http.Cookie
if session_cookie, err = r.Cookie("session"); err != nil {
if sessionCookie, err = r.Cookie("session"); err != nil {
return } err = database.QueryRow( r.Context(), "SELECT user_id, COALESCE(username, '') FROM users u JOIN sessions s ON u.id = s.user_id WHERE s.session_id = $1;",
session_cookie.Value,
sessionCookie.Value,
).Scan(&id, &username) return }
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" "runtime" )
// httpHandleGC forces a garbage collection run and redirects the
// requester back to the index page.
func httpHandleGC(w http.ResponseWriter, r *http.Request, params map[string]any) {
	runtime.GC()
	http.Redirect(w, r, "/", http.StatusSeeOther)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" "path/filepath" "strconv" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgtype" )
func handle_group_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
var group_path []string
func httpHandleGroupIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var groupPath []string
var repos []nameDesc var subgroups []nameDesc var err error
var group_id int var group_description string
var groupID int var groupDesc string
group_path = params["group_path"].([]string)
groupPath = params["group_path"].([]string)
// The group itself err = database.QueryRow(r.Context(), ` WITH RECURSIVE group_path_cte AS ( SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT c.id, COALESCE(g.description, '') FROM group_path_cte c JOIN groups g ON g.id = c.id WHERE c.depth = cardinality($1::text[]) `,
pgtype.FlatArray[string](group_path), ).Scan(&group_id, &group_description)
pgtype.FlatArray[string](groupPath), ).Scan(&groupID, &groupDesc)
if err == pgx.ErrNoRows {
http.Error(w, "Group not found", http.StatusNotFound)
return
} else if err != nil {
http.Error(w, "Error getting group: "+err.Error(), http.StatusInternalServerError)
return
}
// ACL
var count int
err = database.QueryRow(r.Context(), `
SELECT COUNT(*)
FROM user_group_roles
WHERE user_id = $1
AND group_id = $2
`, params["user_id"].(int), group_id).Scan(&count)
`, params["user_id"].(int), groupID).Scan(&count)
if err != nil {
http.Error(w, "Error checking access: "+err.Error(), http.StatusInternalServerError)
return
}
direct_access := (count > 0)
directAccess := (count > 0)
if r.Method == "POST" {
if !direct_access {
if !directAccess {
http.Error(w, "You do not have direct access to this group", http.StatusForbidden) return }
repo_name := r.FormValue("repo_name")
repo_description := r.FormValue("repo_desc")
contrib_requirements := r.FormValue("repo_contrib")
if repo_name == "" {
repoName := r.FormValue("repo_name")
repoDesc := r.FormValue("repo_desc")
contribReq := r.FormValue("repo_contrib")
if repoName == "" {
http.Error(w, "Repo name is required", http.StatusBadRequest) return }
var new_repo_id int
var newRepoID int
err := database.QueryRow( r.Context(), `INSERT INTO repos (name, description, group_id, contrib_requirements) VALUES ($1, $2, $3, $4) RETURNING id`,
repo_name, repo_description, group_id, contrib_requirements, ).Scan(&new_repo_id)
repoName, repoDesc, groupID, contribReq, ).Scan(&newRepoID)
if err != nil {
http.Error(w, "Error creating repo: "+err.Error(), http.StatusInternalServerError)
return
}
file_path := filepath.Join(config.Git.RepoDir, strconv.Itoa(new_repo_id)+".git")
filePath := filepath.Join(config.Git.RepoDir, strconv.Itoa(newRepoID)+".git")
_, err = database.Exec( r.Context(), `UPDATE repos SET filesystem_path = $1 WHERE id = $2`,
file_path, new_repo_id,
filePath, newRepoID,
)
if err != nil {
http.Error(w, "Error updating repo path: "+err.Error(), http.StatusInternalServerError)
return
}
if err = gitInit(file_path); err != nil {
if err = gitInit(filePath); err != nil {
http.Error(w, "Error initializing repo: "+err.Error(), http.StatusInternalServerError) return }
redirect_unconditionally(w, r)
redirectUnconditionally(w, r)
return } // Repos var rows pgx.Rows rows, err = database.Query(r.Context(), ` SELECT name, COALESCE(description, '') FROM repos WHERE group_id = $1
`, group_id)
`, groupID)
if err != nil {
http.Error(w, "Error getting repos: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
for rows.Next() {
var name, description string
if err = rows.Scan(&name, &description); err != nil {
http.Error(w, "Error getting repos: "+err.Error(), http.StatusInternalServerError)
return
}
repos = append(repos, nameDesc{name, description})
}
if err = rows.Err(); err != nil {
http.Error(w, "Error getting repos: "+err.Error(), http.StatusInternalServerError)
return
}
// Subgroups
rows, err = database.Query(r.Context(), `
SELECT name, COALESCE(description, '')
FROM groups
WHERE parent_group = $1
`, group_id)
`, groupID)
if err != nil {
http.Error(w, "Error getting subgroups: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
for rows.Next() {
var name, description string
if err = rows.Scan(&name, &description); err != nil {
http.Error(w, "Error getting subgroups: "+err.Error(), http.StatusInternalServerError)
return
}
subgroups = append(subgroups, nameDesc{name, description})
}
if err = rows.Err(); err != nil {
http.Error(w, "Error getting subgroups: "+err.Error(), http.StatusInternalServerError)
return
}
params["repos"] = repos
params["subgroups"] = subgroups
params["description"] = group_description params["direct_access"] = direct_access
params["description"] = groupDesc params["direct_access"] = directAccess
render_template(w, "group", params)
renderTemplate(w, "group", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" "runtime" "github.com/dustin/go-humanize" )
func handle_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var err error
var groups []nameDesc
groups, err = queryNameDesc(r.Context(), "SELECT name, COALESCE(description, '') FROM groups WHERE parent_group IS NULL")
if err != nil {
http.Error(w, "Error querying groups: "+err.Error(), http.StatusInternalServerError)
return
}
params["groups"] = groups
// Memory currently allocated
memstats := runtime.MemStats{}
runtime.ReadMemStats(&memstats)
params["mem"] = humanize.IBytes(memstats.Alloc)
render_template(w, "index", params)
renderTemplate(w, "index", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "crypto/rand" "encoding/base64" "errors" "fmt" "net/http" "time" "github.com/alexedwards/argon2id" "github.com/jackc/pgx/v5" )
func handle_login(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleLogin(w http.ResponseWriter, r *http.Request, params map[string]any) {
var username, password string
var user_id int var password_hash string
var userID int var passwordHash string
var err error
var password_matches bool var cookie_value string
var passwordMatches bool var cookieValue string
var now time.Time
var expiry time.Time
var cookie http.Cookie
if r.Method != "POST" {
render_template(w, "login", params)
renderTemplate(w, "login", params)
return
}
username = r.PostFormValue("username")
password = r.PostFormValue("password")
err = database.QueryRow(r.Context(),
"SELECT id, COALESCE(password, '') FROM users WHERE username = $1",
username,
).Scan(&user_id, &password_hash)
).Scan(&userID, &passwordHash)
if err != nil {
if errors.Is(err, pgx.ErrNoRows) {
params["login_error"] = "Unknown username"
render_template(w, "login", params)
renderTemplate(w, "login", params)
return } http.Error(w, "Error querying user information: "+err.Error(), http.StatusInternalServerError) return }
if password_hash == "" {
if passwordHash == "" {
params["login_error"] = "User has no password"
render_template(w, "login", params)
renderTemplate(w, "login", params)
return }
if password_matches, err = argon2id.ComparePasswordAndHash(password, password_hash); err != nil {
if passwordMatches, err = argon2id.ComparePasswordAndHash(password, passwordHash); err != nil {
http.Error(w, "Error comparing password and hash: "+err.Error(), http.StatusInternalServerError) return }
if !password_matches {
if !passwordMatches {
params["login_error"] = "Invalid password"
render_template(w, "login", params)
renderTemplate(w, "login", params)
return }
if cookie_value, err = random_urlsafe_string(16); err != nil {
if cookieValue, err = randomUrlsafeStr(16); err != nil {
http.Error(w, "Error getting random string: "+err.Error(), http.StatusInternalServerError)
return
}
now = time.Now()
expiry = now.Add(time.Duration(config.HTTP.CookieExpiry) * time.Second)
cookie = http.Cookie{
Name: "session",
Value: cookie_value,
Value: cookieValue,
SameSite: http.SameSiteLaxMode, HttpOnly: true, Secure: false, // TODO Expires: expiry, Path: "/", // TODO: Expire } http.SetCookie(w, &cookie)
_, err = database.Exec(r.Context(), "INSERT INTO sessions (user_id, session_id) VALUES ($1, $2)", user_id, cookie_value)
_, err = database.Exec(r.Context(), "INSERT INTO sessions (user_id, session_id) VALUES ($1, $2)", userID, cookieValue)
if err != nil {
http.Error(w, "Error inserting session: "+err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, r, "/", http.StatusSeeOther)
}
// randomUrlsafeStr generates a random string of the given entropic size
// using the URL-safe base64 encoding. The actual size of the string returned
// will be 4*sz.
func randomUrlsafeStr(sz int) (string, error) {
	r := make([]byte, 3*sz)
	// crypto/rand is used since the value becomes a session token.
	_, err := rand.Read(r)
	if err != nil {
		return "", fmt.Errorf("error generating random string: %w", err)
	}
	// 3*sz random bytes encode to exactly 4*sz unpadded base64 chars.
	return base64.RawURLEncoding.EncodeToString(r), nil
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "fmt" "net/http" "strings" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/filemode" "github.com/go-git/go-git/v5/plumbing/format/diff" "github.com/go-git/go-git/v5/plumbing/object" "go.lindenii.runxiyu.org/lindenii-common/misc" ) // The file patch type from go-git isn't really usable in HTML templates // either.
type usable_file_patch_t struct {
type usableFilePatch struct {
From diff.File To diff.File
Chunks []usable_chunk
Chunks []usableChunk
}
type usable_chunk struct {
type usableChunk struct {
Operation diff.Operation Content string }
func handle_repo_commit(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleRepoCommit(w http.ResponseWriter, r *http.Request, params map[string]any) {
var repo *git.Repository
var commit_id_specified_string, commit_id_specified_string_without_suffix string var commit_id plumbing.Hash var parent_commit_hash plumbing.Hash var commit_object *object.Commit var commit_id_string string
var commitIDStrSpec, commitIDStrSpecNoSuffix string var commitID plumbing.Hash var parentCommitHash plumbing.Hash var commitObj *object.Commit var commitIDStr string
var err error var patch *object.Patch
repo, commit_id_specified_string = params["repo"].(*git.Repository), params["commit_id"].(string)
repo, commitIDStrSpec = params["repo"].(*git.Repository), params["commit_id"].(string)
commit_id_specified_string_without_suffix = strings.TrimSuffix(commit_id_specified_string, ".patch")
commit_id = plumbing.NewHash(commit_id_specified_string_without_suffix)
if commit_object, err = repo.CommitObject(commit_id); err != nil {
commitIDStrSpecNoSuffix = strings.TrimSuffix(commitIDStrSpec, ".patch")
commitID = plumbing.NewHash(commitIDStrSpecNoSuffix)
if commitObj, err = repo.CommitObject(commitID); err != nil {
http.Error(w, "Error getting commit object: "+err.Error(), http.StatusInternalServerError) return }
if commit_id_specified_string_without_suffix != commit_id_specified_string {
var formatted_patch string
if formatted_patch, err = fmtCommitPatch(commit_object); err != nil {
if commitIDStrSpecNoSuffix != commitIDStrSpec {
var patchStr string
if patchStr, err = fmtCommitPatch(commitObj); err != nil {
http.Error(w, "Error formatting patch: "+err.Error(), http.StatusInternalServerError) return }
fmt.Fprintln(w, formatted_patch)
fmt.Fprintln(w, patchStr)
return }
commit_id_string = commit_object.Hash.String()
commitIDStr = commitObj.Hash.String()
if commit_id_string != commit_id_specified_string {
http.Redirect(w, r, commit_id_string, http.StatusSeeOther)
if commitIDStr != commitIDStrSpec {
http.Redirect(w, r, commitIDStr, http.StatusSeeOther)
return }
params["commit_object"] = commit_object params["commit_id"] = commit_id_string
params["commit_object"] = commitObj params["commit_id"] = commitIDStr
parent_commit_hash, patch, err = fmtCommitAsPatch(commit_object)
parentCommitHash, patch, err = fmtCommitAsPatch(commitObj)
if err != nil {
http.Error(w, "Error getting patch from commit: "+err.Error(), http.StatusInternalServerError)
return
}
params["parent_commit_hash"] = parent_commit_hash.String()
params["parent_commit_hash"] = parentCommitHash.String()
params["patch"] = patch
params["file_patches"] = make_usable_file_patches(patch)
params["file_patches"] = makeUsableFilePatches(patch)
render_template(w, "repo_commit", params)
renderTemplate(w, "repo_commit", params)
}
type fake_diff_file struct {
type fakeDiffFile struct {
hash plumbing.Hash mode filemode.FileMode path string }
func (f fake_diff_file) Hash() plumbing.Hash {
func (f fakeDiffFile) Hash() plumbing.Hash {
return f.hash }
func (f fake_diff_file) Mode() filemode.FileMode {
func (f fakeDiffFile) Mode() filemode.FileMode {
return f.mode }
func (f fake_diff_file) Path() string {
func (f fakeDiffFile) Path() string {
return f.path }
var fake_diff_file_null = fake_diff_file{
var nullFakeDiffFile = fakeDiffFile{
hash: plumbing.NewHash("0000000000000000000000000000000000000000"),
mode: misc.First_or_panic(filemode.New("100644")),
path: "",
}
func make_usable_file_patches(patch diff.Patch) (usable_file_patches []usable_file_patch_t) {
func makeUsableFilePatches(patch diff.Patch) (usableFilePatches []usableFilePatch) {
// TODO: Remove unnecessary context // TODO: Prepend "+"/"-"/" " instead of solely distinguishing based on color
for _, file_patch := range patch.FilePatches() {
for _, filePatch := range patch.FilePatches() {
var from, to diff.File
var usable_file_patch usable_file_patch_t
chunks := []usable_chunk{}
var ufp usableFilePatch
chunks := []usableChunk{}
from, to = file_patch.Files()
from, to = filePatch.Files()
if from == nil {
from = fake_diff_file_null
from = nullFakeDiffFile
}
if to == nil {
to = fake_diff_file_null
to = nullFakeDiffFile
}
for _, chunk := range file_patch.Chunks() {
for _, chunk := range filePatch.Chunks() {
var content string
content = chunk.Content()
if len(content) > 0 && content[0] == '\n' {
content = "\n" + content
} // Horrible hack to fix how browsers newlines that immediately proceed <pre>
chunks = append(chunks, usable_chunk{
chunks = append(chunks, usableChunk{
Operation: chunk.Type(), Content: content, }) }
usable_file_patch = usable_file_patch_t{
ufp = usableFilePatch{
Chunks: chunks, From: from, To: to, }
usable_file_patches = append(usable_file_patches, usable_file_patch)
usableFilePatches = append(usableFilePatches, ufp)
} return }
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
package main
import (
"net/http"
"github.com/jackc/pgx/v5"
)
// id_title_status_t is a lightweight summary of a merge request row,
// used to render the per-repo contributions list.
type id_title_status_t struct {
	ID     int    // merge_requests.id
	Title  string // title; the query substitutes 'Untitled' when NULL
	Status string // merge request status
}
func handle_repo_contrib_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleRepoContribIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var rows pgx.Rows
var result []id_title_status_t
var err error
if rows, err = database.Query(r.Context(),
"SELECT id, COALESCE(title, 'Untitled'), status FROM merge_requests WHERE repo_id = $1",
params["repo_id"],
); err != nil {
http.Error(w, "Error querying merge requests: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
for rows.Next() {
var id int
var title, status string
if err = rows.Scan(&id, &title, &status); err != nil {
http.Error(w, "Error scanning merge request: "+err.Error(), http.StatusInternalServerError)
return
}
result = append(result, id_title_status_t{id, title, status})
}
if err = rows.Err(); err != nil {
http.Error(w, "Error ranging over merge requests: "+err.Error(), http.StatusInternalServerError)
return
}
params["merge_requests"] = result
render_template(w, "repo_contrib_index", params)
renderTemplate(w, "repo_contrib_index", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" "strconv" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" )
func handle_repo_contrib_one(w http.ResponseWriter, r *http.Request, params map[string]any) {
var mr_id_string string
var mr_id int
func httpHandleRepoContribOne(w http.ResponseWriter, r *http.Request, params map[string]any) {
var mrIDStr string
var mrIDInt int
var err error
var title, status, source_ref, destination_branch string
var title, status, srcRefStr, dstBranchStr string
var repo *git.Repository
var source_ref_hash plumbing.Hash var source_commit, destination_commit, merge_base *object.Commit var merge_bases []*object.Commit
var srcRefHash plumbing.Hash var dstBranchHash plumbing.Hash var srcCommit, dstCommit, mergeBaseCommit *object.Commit var mergeBases []*object.Commit
mr_id_string = params["mr_id"].(string) mr_id_int64, err := strconv.ParseInt(mr_id_string, 10, strconv.IntSize)
mrIDStr = params["mr_id"].(string) mrIDInt64, err := strconv.ParseInt(mrIDStr, 10, strconv.IntSize)
if err != nil {
http.Error(w, "Merge request ID not an integer: "+err.Error(), http.StatusBadRequest)
return
}
mr_id = int(mr_id_int64)
mrIDInt = int(mrIDInt64)
if err = database.QueryRow(r.Context(), "SELECT COALESCE(title, ''), status, source_ref, COALESCE(destination_branch, '') FROM merge_requests WHERE id = $1",
mr_id,
).Scan(&title, &status, &source_ref, &destination_branch); err != nil {
mrIDInt,
).Scan(&title, &status, &srcRefStr, &dstBranchStr); err != nil {
http.Error(w, "Error querying merge request: "+err.Error(), http.StatusInternalServerError) return } repo = params["repo"].(*git.Repository)
if source_ref_hash, err = getRefHash(repo, "branch", source_ref); err != nil {
if srcRefHash, err = getRefHash(repo, "branch", srcRefStr); err != nil {
http.Error(w, "Error getting source ref hash: "+err.Error(), http.StatusInternalServerError) return }
if source_commit, err = repo.CommitObject(source_ref_hash); err != nil {
if srcCommit, err = repo.CommitObject(srcRefHash); err != nil {
http.Error(w, "Error getting source commit: "+err.Error(), http.StatusInternalServerError) return }
params["source_commit"] = source_commit
params["source_commit"] = srcCommit
var destination_branch_hash plumbing.Hash
if destination_branch == "" {
destination_branch = "HEAD"
destination_branch_hash, err = getRefHash(repo, "", "")
if dstBranchStr == "" {
dstBranchStr = "HEAD"
dstBranchHash, err = getRefHash(repo, "", "")
} else {
destination_branch_hash, err = getRefHash(repo, "branch", destination_branch)
dstBranchHash, err = getRefHash(repo, "branch", dstBranchStr)
}
if err != nil {
http.Error(w, "Error getting destination branch hash: "+err.Error(), http.StatusInternalServerError)
return
}
if destination_commit, err = repo.CommitObject(destination_branch_hash); err != nil {
if dstCommit, err = repo.CommitObject(dstBranchHash); err != nil {
http.Error(w, "Error getting destination commit: "+err.Error(), http.StatusInternalServerError) return }
params["destination_commit"] = destination_commit
params["destination_commit"] = dstCommit
if merge_bases, err = source_commit.MergeBase(destination_commit); err != nil {
if mergeBases, err = srcCommit.MergeBase(dstCommit); err != nil {
http.Error(w, "Error getting merge base: "+err.Error(), http.StatusInternalServerError) return }
merge_base = merge_bases[0] params["merge_base"] = merge_base
mergeBaseCommit = mergeBases[0] params["merge_base"] = mergeBaseCommit
patch, err := merge_base.Patch(source_commit)
patch, err := mergeBaseCommit.Patch(srcCommit)
if err != nil {
http.Error(w, "Error getting patch: "+err.Error(), http.StatusInternalServerError)
return
}
params["file_patches"] = make_usable_file_patches(patch)
params["file_patches"] = makeUsableFilePatches(patch)
params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, source_ref, destination_branch
params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, srcRefStr, dstBranchStr
render_template(w, "repo_contrib_one", params)
renderTemplate(w, "repo_contrib_one", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" "strings" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/storer" )
func handle_repo_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleRepoIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var repo *git.Repository
var repo_name string
var group_path []string
var ref_hash plumbing.Hash
var err error
var recent_commits []*object.Commit
var commit_object *object.Commit
var tree *object.Tree
var notes []string
var branches []string
var branches_ storer.ReferenceIter
repo, repo_name, group_path = params["repo"].(*git.Repository), params["repo_name"].(string), params["group_path"].([]string)
if strings.Contains(repo_name, "\n") || slice_contains_newline(group_path) {
notes = append(notes, "Path contains newlines; HTTP Git access impossible")
}
ref_hash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string))
if err != nil {
goto no_ref
}
branches_, err = repo.Branches()
if err != nil {
}
err = branches_.ForEach(func(branch *plumbing.Reference) error {
branches = append(branches, branch.Name().Short())
return nil
})
if err != nil {
}
params["branches"] = branches
if recent_commits, err = getRecentCommits(repo, ref_hash, 3); err != nil {
goto no_ref
}
params["commits"] = recent_commits
if commit_object, err = repo.CommitObject(ref_hash); err != nil {
goto no_ref
}
if tree, err = commit_object.Tree(); err != nil {
goto no_ref
}
params["files"] = makeDisplayTree(tree)
params["readme_filename"], params["readme"] = render_readme_at_tree(tree)
params["readme_filename"], params["readme"] = renderReadmeAtTree(tree)
no_ref:
params["http_clone_url"] = generate_http_remote_url(group_path, repo_name) params["ssh_clone_url"] = generate_ssh_remote_url(group_path, repo_name)
params["http_clone_url"] = genHTTPRemoteURL(group_path, repo_name) params["ssh_clone_url"] = genSSHRemoteURL(group_path, repo_name)
params["notes"] = notes
render_template(w, "repo_index", params)
renderTemplate(w, "repo_index", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "fmt" "io" "net/http" "os/exec" "github.com/jackc/pgx/v5/pgtype" )
func handle_repo_info(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
var group_path []string
var repo_name, repo_path string
func httpHandleRepoInfo(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
var groupPath []string
var repoName, repoPath string
if err := database.QueryRow(r.Context(), ` WITH RECURSIVE group_path_cte AS ( -- Start: match the first name in the path where parent_group IS NULL SELECT id, parent_group, name, 1 AS depth FROM groups WHERE name = ($1::text[])[1] AND parent_group IS NULL UNION ALL -- Recurse: jion next segment of the path SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1 FROM groups g JOIN group_path_cte ON g.parent_group = group_path_cte.id WHERE g.name = ($1::text[])[group_path_cte.depth + 1] AND group_path_cte.depth + 1 <= cardinality($1::text[]) ) SELECT r.filesystem_path FROM group_path_cte c JOIN repos r ON r.group_id = c.id WHERE c.depth = cardinality($1::text[]) AND r.name = $2 `,
pgtype.FlatArray[string](group_path),
repo_name,
).Scan(&repo_path); err != nil {
pgtype.FlatArray[string](groupPath),
repoName,
).Scan(&repoPath); err != nil {
return err
}
w.Header().Set("Content-Type", "application/x-git-upload-pack-advertisement")
w.WriteHeader(http.StatusOK)
cmd := exec.Command("git", "upload-pack", "--stateless-rpc", "--advertise-refs", repo_path)
cmd := exec.Command("git", "upload-pack", "--stateless-rpc", "--advertise-refs", repoPath)
stdout, err := cmd.StdoutPipe()
if err != nil {
return err
}
defer func() {
_ = stdout.Close()
}()
cmd.Stderr = cmd.Stdout
if err = cmd.Start(); err != nil {
return err
}
if err = pack_line(w, "# service=git-upload-pack\n"); err != nil {
if err = packLine(w, "# service=git-upload-pack\n"); err != nil {
return err }
if err = pack_flush(w); err != nil {
if err = packFlush(w); err != nil {
return
}
if _, err = io.Copy(w, stdout); err != nil {
return err
}
if err = cmd.Wait(); err != nil {
return err
}
return nil
}
// packLine writes s to w as a Git pkt-line: a four-hex-digit length
// prefix (counting the prefix itself) followed by the payload.
// Taken from https://github.com/icyphox/legit, MIT license
func packLine(w io.Writer, s string) error {
	_, err := fmt.Fprintf(w, "%04x%s", len(s)+4, s)
	return err
}

// packFlush writes the special flush-pkt "0000", which delimits pkt-line
// sections in the Git wire protocol.
// Taken from https://github.com/icyphox/legit, MIT license
func packFlush(w io.Writer) error {
	_, err := fmt.Fprint(w, "0000")
	return err
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" ) // TODO: I probably shouldn't include *all* commits here...
func handle_repo_log(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleRepoLog(w http.ResponseWriter, r *http.Request, params map[string]any) {
var repo *git.Repository
var ref_hash plumbing.Hash
var err error
var commits []*object.Commit
repo = params["repo"].(*git.Repository)
if ref_hash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
http.Error(w, "Error getting ref hash: "+err.Error(), http.StatusInternalServerError)
return
}
if commits, err = getRecentCommits(repo, ref_hash, -1); err != nil {
http.Error(w, "Error getting recent commits: "+err.Error(), http.StatusInternalServerError)
return
}
params["commits"] = commits
render_template(w, "repo_log", params)
renderTemplate(w, "repo_log", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "fmt" "net/http" "path" "strings" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" )
func handle_repo_raw(w http.ResponseWriter, r *http.Request, params map[string]any) {
func httpHandleRepoRaw(w http.ResponseWriter, r *http.Request, params map[string]any) {
var raw_path_spec, path_spec string
var repo *git.Repository
var ref_hash plumbing.Hash
var commit_object *object.Commit
var tree *object.Tree
var err error
raw_path_spec = params["rest"].(string)
repo, path_spec = params["repo"].(*git.Repository), strings.TrimSuffix(raw_path_spec, "/")
params["path_spec"] = path_spec
if ref_hash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
http.Error(w, "Error getting ref hash: "+err.Error(), http.StatusInternalServerError)
return
}
if commit_object, err = repo.CommitObject(ref_hash); err != nil {
http.Error(w, "Error getting commit object: "+err.Error(), http.StatusInternalServerError)
return
}
if tree, err = commit_object.Tree(); err != nil {
http.Error(w, "Error getting file tree: "+err.Error(), http.StatusInternalServerError)
return
}
var target *object.Tree
if path_spec == "" {
target = tree
} else {
if target, err = tree.Tree(path_spec); err != nil {
var file *object.File
var file_contents string
if file, err = tree.File(path_spec); err != nil {
http.Error(w, "Error retrieving path: "+err.Error(), http.StatusInternalServerError)
return
}
if len(raw_path_spec) != 0 && raw_path_spec[len(raw_path_spec)-1] == '/' {
http.Redirect(w, r, "../"+path_spec, http.StatusSeeOther)
return
}
if file_contents, err = file.Contents(); err != nil {
http.Error(w, "Error reading file: "+err.Error(), http.StatusInternalServerError)
return
}
fmt.Fprint(w, file_contents)
return
}
}
if len(raw_path_spec) != 0 && raw_path_spec[len(raw_path_spec)-1] != '/' {
http.Redirect(w, r, path.Base(path_spec)+"/", http.StatusSeeOther)
return
}
params["files"] = makeDisplayTree(target)
render_template(w, "repo_raw_dir", params)
renderTemplate(w, "repo_raw_dir", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "bytes" "html/template" "net/http" "path" "strings" "github.com/alecthomas/chroma/v2"
chroma_formatters_html "github.com/alecthomas/chroma/v2/formatters/html" chroma_lexers "github.com/alecthomas/chroma/v2/lexers" chroma_styles "github.com/alecthomas/chroma/v2/styles"
chromaHTML "github.com/alecthomas/chroma/v2/formatters/html" chromaLexers "github.com/alecthomas/chroma/v2/lexers" chromaStyles "github.com/alecthomas/chroma/v2/styles"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" )
func handle_repo_tree(w http.ResponseWriter, r *http.Request, params map[string]any) {
var raw_path_spec, path_spec string
func httpHandleRepoTree(w http.ResponseWriter, r *http.Request, params map[string]any) {
var rawPathSpec, pathSpec string
var repo *git.Repository
var ref_hash plumbing.Hash var commit_object *object.Commit
var refHash plumbing.Hash var commitObject *object.Commit
var tree *object.Tree var err error
raw_path_spec = params["rest"].(string) repo, path_spec = params["repo"].(*git.Repository), strings.TrimSuffix(raw_path_spec, "/") params["path_spec"] = path_spec
rawPathSpec = params["rest"].(string) repo, pathSpec = params["repo"].(*git.Repository), strings.TrimSuffix(rawPathSpec, "/") params["path_spec"] = pathSpec
if ref_hash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
if refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
http.Error(w, "Error getting ref hash: "+err.Error(), http.StatusInternalServerError) return }
if commit_object, err = repo.CommitObject(ref_hash); err != nil {
if commitObject, err = repo.CommitObject(refHash); err != nil {
http.Error(w, "Error getting commit object: "+err.Error(), http.StatusInternalServerError) return }
if tree, err = commit_object.Tree(); err != nil {
if tree, err = commitObject.Tree(); err != nil {
http.Error(w, "Error getting file tree: "+err.Error(), http.StatusInternalServerError) return } var target *object.Tree
if path_spec == "" {
if pathSpec == "" {
target = tree
} else {
if target, err = tree.Tree(path_spec); err != nil {
if target, err = tree.Tree(pathSpec); err != nil {
var file *object.File
var file_contents string
var fileContent string
var lexer chroma.Lexer var iterator chroma.Iterator var style *chroma.Style
var formatter *chroma_formatters_html.Formatter var formatted_encapsulated template.HTML
var formatter *chromaHTML.Formatter var formattedHTML template.HTML
if file, err = tree.File(path_spec); err != nil {
if file, err = tree.File(pathSpec); err != nil {
http.Error(w, "Error retrieving path: "+err.Error(), http.StatusInternalServerError) return }
if len(raw_path_spec) != 0 && raw_path_spec[len(raw_path_spec)-1] == '/' {
http.Redirect(w, r, "../"+path_spec, http.StatusSeeOther)
if len(rawPathSpec) != 0 && rawPathSpec[len(rawPathSpec)-1] == '/' {
http.Redirect(w, r, "../"+pathSpec, http.StatusSeeOther)
return }
if file_contents, err = file.Contents(); err != nil {
if fileContent, err = file.Contents(); err != nil {
http.Error(w, "Error reading file: "+err.Error(), http.StatusInternalServerError) return }
lexer = chroma_lexers.Match(path_spec)
lexer = chromaLexers.Match(pathSpec)
if lexer == nil {
lexer = chroma_lexers.Fallback
lexer = chromaLexers.Fallback
}
if iterator, err = lexer.Tokenise(nil, file_contents); err != nil {
if iterator, err = lexer.Tokenise(nil, fileContent); err != nil {
http.Error(w, "Error tokenizing code: "+err.Error(), http.StatusInternalServerError) return }
var formatted_unencapsulated bytes.Buffer
style = chroma_styles.Get("autumn")
formatter = chroma_formatters_html.New(chroma_formatters_html.WithClasses(true), chroma_formatters_html.TabWidth(8))
if err = formatter.Format(&formatted_unencapsulated, style, iterator); err != nil {
var formattedHTMLStr bytes.Buffer
style = chromaStyles.Get("autumn")
formatter = chromaHTML.New(chromaHTML.WithClasses(true), chromaHTML.TabWidth(8))
if err = formatter.Format(&formattedHTMLStr, style, iterator); err != nil {
http.Error(w, "Error formatting code: "+err.Error(), http.StatusInternalServerError) return }
formatted_encapsulated = template.HTML(formatted_unencapsulated.Bytes()) //#nosec G203 params["file_contents"] = formatted_encapsulated
formattedHTML = template.HTML(formattedHTMLStr.Bytes()) //#nosec G203 params["file_contents"] = formattedHTML
render_template(w, "repo_tree_file", params)
renderTemplate(w, "repo_tree_file", params)
return } }
if len(raw_path_spec) != 0 && raw_path_spec[len(raw_path_spec)-1] != '/' {
http.Redirect(w, r, path.Base(path_spec)+"/", http.StatusSeeOther)
if len(rawPathSpec) != 0 && rawPathSpec[len(rawPathSpec)-1] != '/' {
http.Redirect(w, r, path.Base(pathSpec)+"/", http.StatusSeeOther)
return }
params["readme_filename"], params["readme"] = render_readme_at_tree(target)
params["readme_filename"], params["readme"] = renderReadmeAtTree(target)
params["files"] = makeDisplayTree(target)
render_template(w, "repo_tree_dir", params)
renderTemplate(w, "repo_tree_dir", params)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "io" "net/http" "os" "os/exec" "github.com/jackc/pgx/v5/pgtype" )
func handle_upload_pack(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
func httpHandleUploadPack(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
var group_path []string
var repo_name string
var repo_path string
var stdout io.ReadCloser
var stdin io.WriteCloser
var cmd *exec.Cmd
group_path, repo_name = params["group_path"].([]string), params["repo_name"].(string)
if err := database.QueryRow(r.Context(), `
WITH RECURSIVE group_path_cte AS (
-- Start: match the first name in the path where parent_group IS NULL
SELECT
id,
parent_group,
name,
1 AS depth
FROM groups
WHERE name = ($1::text[])[1]
AND parent_group IS NULL
UNION ALL
-- Recurse: jion next segment of the path
SELECT
g.id,
g.parent_group,
g.name,
group_path_cte.depth + 1
FROM groups g
JOIN group_path_cte ON g.parent_group = group_path_cte.id
WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
AND group_path_cte.depth + 1 <= cardinality($1::text[])
)
SELECT r.filesystem_path
FROM group_path_cte c
JOIN repos r ON r.group_id = c.id
WHERE c.depth = cardinality($1::text[])
AND r.name = $2
`,
pgtype.FlatArray[string](group_path),
repo_name,
).Scan(&repo_path); err != nil {
return err
}
w.Header().Set("Content-Type", "application/x-git-upload-pack-result")
w.Header().Set("Connection", "Keep-Alive")
w.Header().Set("Transfer-Encoding", "chunked")
w.WriteHeader(http.StatusOK)
cmd = exec.Command("git", "upload-pack", "--stateless-rpc", repo_path)
cmd.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket)
if stdout, err = cmd.StdoutPipe(); err != nil {
return err
}
cmd.Stderr = cmd.Stdout
defer func() {
_ = stdout.Close()
}()
if stdin, err = cmd.StdinPipe(); err != nil {
return err
}
defer func() {
_ = stdin.Close()
}()
if err = cmd.Start(); err != nil {
return err
}
if _, err = io.Copy(stdin, r.Body); err != nil {
return err
}
if err = stdin.Close(); err != nil {
return err
}
if _, err = io.Copy(w, stdout); err != nil {
return err
}
if err = cmd.Wait(); err != nil {
return err
}
return nil
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/http" )
// httpHandleUsers handles the user listing endpoint. Not yet
// implemented; always responds with 501.
func httpHandleUsers(w http.ResponseWriter, r *http.Request, params map[string]any) {
	http.Error(w, "Not implemented", http.StatusNotImplemented)
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
package main
import (
"errors"
"fmt"
"net/http"
"strconv"
"strings"
"github.com/jackc/pgx/v5"
"go.lindenii.runxiyu.org/lindenii-common/clog"
)
// httpRouter is the top-level http.Handler for the forge; its ServeHTTP
// parses the request URI into group/module segments and dispatches to
// the appropriate handler.
type httpRouter struct{}
func (router *httpRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {
clog.Info("Incoming HTTP: " + r.RemoteAddr + " " + r.Method + " " + r.RequestURI)
var segments []string
var err error
var non_empty_last_segments_len int var separator_index int
var contentfulSegmentsLen int var sepIndex int
params := make(map[string]any)
if segments, _, err = parse_request_uri(r.RequestURI); err != nil {
if segments, _, err = parseReqURI(r.RequestURI); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest) return }
non_empty_last_segments_len = len(segments)
contentfulSegmentsLen = len(segments)
if segments[len(segments)-1] == "" {
non_empty_last_segments_len--
contentfulSegmentsLen--
}
if segments[0] == ":" {
if len(segments) < 2 {
http.Error(w, "Blank system endpoint", http.StatusNotFound)
return
} else if len(segments) == 2 && redirect_with_slash(w, r) {
} else if len(segments) == 2 && redirectDir(w, r) {
return
}
switch segments[1] {
case "static":
static_handler.ServeHTTP(w, r)
staticHandler.ServeHTTP(w, r)
return case "source":
source_handler.ServeHTTP(w, r)
sourceHandler.ServeHTTP(w, r)
return } } params["url_segments"] = segments params["global"] = globalData
var _user_id int // 0 for none _user_id, params["username"], err = get_user_info_from_request(r) params["user_id"] = _user_id
var userID int // 0 for none userID, params["username"], err = getUserFromRequest(r) params["user_id"] = userID
if errors.Is(err, http.ErrNoCookie) {
} else if errors.Is(err, pgx.ErrNoRows) {
} else if err != nil {
http.Error(w, "Error getting user info from request: "+err.Error(), http.StatusInternalServerError)
return
}
if _user_id == 0 {
if userID == 0 {
params["user_id_string"] = ""
} else {
params["user_id_string"] = strconv.Itoa(_user_id)
params["user_id_string"] = strconv.Itoa(userID)
}
if segments[0] == ":" {
switch segments[1] {
case "login":
handle_login(w, r, params)
httpHandleLogin(w, r, params)
return case "users":
handle_users(w, r, params)
httpHandleUsers(w, r, params)
return case "gc":
handle_gc(w, r, params)
httpHandleGC(w, r, params)
return
default:
http.Error(w, fmt.Sprintf("Unknown system module type: %s", segments[1]), http.StatusNotFound)
return
}
}
separator_index = -1
sepIndex = -1
for i, part := range segments {
if part == ":" {
separator_index = i
sepIndex = i
break } }
params["separator_index"] = separator_index
params["separator_index"] = sepIndex
var group_path []string var module_type string var module_name string
var groupPath []string var moduleType string var moduleName string
if separator_index > 0 {
group_path = segments[:separator_index]
if sepIndex > 0 {
groupPath = segments[:sepIndex]
} else {
group_path = segments[:len(segments)-1]
groupPath = segments[:len(segments)-1]
}
params["group_path"] = group_path
params["group_path"] = groupPath
switch {
case non_empty_last_segments_len == 0:
handle_index(w, r, params)
case separator_index == -1:
if redirect_with_slash(w, r) {
case contentfulSegmentsLen == 0:
httpHandleIndex(w, r, params)
case sepIndex == -1:
if redirectDir(w, r) {
return }
handle_group_index(w, r, params) case non_empty_last_segments_len == separator_index+1:
httpHandleGroupIndex(w, r, params) case contentfulSegmentsLen == sepIndex+1:
http.Error(w, "Illegal path 1", http.StatusNotImplemented) return
case non_empty_last_segments_len == separator_index+2:
case contentfulSegmentsLen == sepIndex+2:
http.Error(w, "Illegal path 2", http.StatusNotImplemented) return default:
module_type = segments[separator_index+1]
module_name = segments[separator_index+2]
switch module_type {
moduleType = segments[sepIndex+1]
moduleName = segments[sepIndex+2]
switch moduleType {
case "repos":
params["repo_name"] = module_name
params["repo_name"] = moduleName
if non_empty_last_segments_len > separator_index+3 {
switch segments[separator_index+3] {
if contentfulSegmentsLen > sepIndex+3 {
switch segments[sepIndex+3] {
case "info":
if err = handle_repo_info(w, r, params); err != nil {
if err = httpHandleRepoInfo(w, r, params); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) } return case "git-upload-pack":
if err = handle_upload_pack(w, r, params); err != nil {
if err = httpHandleUploadPack(w, r, params); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) } return } }
if params["ref_type"], params["ref_name"], err = get_param_ref_and_type(r); err != nil {
if errors.Is(err, err_no_ref_spec) {
if params["ref_type"], params["ref_name"], err = getParamRefTypeName(r); err != nil {
if errors.Is(err, errNoRefSpec) {
params["ref_type"] = ""
} else {
http.Error(w, "Error querying ref type: "+err.Error(), http.StatusInternalServerError)
return
}
}
// TODO: subgroups
if params["repo"], params["repo_description"], params["repo_id"], err = openRepo(r.Context(), group_path, module_name); err != nil {
if params["repo"], params["repo_description"], params["repo_id"], err = openRepo(r.Context(), groupPath, moduleName); err != nil {
http.Error(w, "Error opening repo: "+err.Error(), http.StatusInternalServerError) return }
if non_empty_last_segments_len == separator_index+3 {
if redirect_with_slash(w, r) {
if contentfulSegmentsLen == sepIndex+3 {
if redirectDir(w, r) {
return }
handle_repo_index(w, r, params)
httpHandleRepoIndex(w, r, params)
return }
repo_feature := segments[separator_index+3]
switch repo_feature {
repoFeature := segments[sepIndex+3]
switch repoFeature {
case "tree":
params["rest"] = strings.Join(segments[separator_index+4:], "/")
if len(segments) < separator_index+5 && redirect_with_slash(w, r) {
params["rest"] = strings.Join(segments[sepIndex+4:], "/")
if len(segments) < sepIndex+5 && redirectDir(w, r) {
return }
handle_repo_tree(w, r, params)
httpHandleRepoTree(w, r, params)
case "raw":
params["rest"] = strings.Join(segments[separator_index+4:], "/")
if len(segments) < separator_index+5 && redirect_with_slash(w, r) {
params["rest"] = strings.Join(segments[sepIndex+4:], "/")
if len(segments) < sepIndex+5 && redirectDir(w, r) {
return }
handle_repo_raw(w, r, params)
httpHandleRepoRaw(w, r, params)
case "log":
if non_empty_last_segments_len > separator_index+4 {
if contentfulSegmentsLen > sepIndex+4 {
http.Error(w, "Too many parameters", http.StatusBadRequest) return }
if redirect_with_slash(w, r) {
if redirectDir(w, r) {
return }
handle_repo_log(w, r, params)
httpHandleRepoLog(w, r, params)
case "commit":
if redirect_without_slash(w, r) {
if redirectNoDir(w, r) {
return }
params["commit_id"] = segments[separator_index+4] handle_repo_commit(w, r, params)
params["commit_id"] = segments[sepIndex+4] httpHandleRepoCommit(w, r, params)
case "contrib":
if redirect_with_slash(w, r) {
if redirectDir(w, r) {
return }
switch non_empty_last_segments_len {
case separator_index + 4:
handle_repo_contrib_index(w, r, params)
case separator_index + 5:
params["mr_id"] = segments[separator_index+4]
handle_repo_contrib_one(w, r, params)
switch contentfulSegmentsLen {
case sepIndex + 4:
httpHandleRepoContribIndex(w, r, params)
case sepIndex + 5:
params["mr_id"] = segments[sepIndex+4]
httpHandleRepoContribOne(w, r, params)
default: http.Error(w, "Too many parameters", http.StatusBadRequest) } default:
http.Error(w, fmt.Sprintf("Unknown repo feature: %s", repo_feature), http.StatusNotFound)
http.Error(w, fmt.Sprintf("Unknown repo feature: %s", repoFeature), http.StatusNotFound)
} default:
http.Error(w, fmt.Sprintf("Unknown module type: %s", module_type), http.StatusNotFound)
http.Error(w, fmt.Sprintf("Unknown module type: %s", moduleType), http.StatusNotFound)
} } }
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import "net/http"
// render_template abstracts out the annoyances of reporting template rendering
// renderTemplate abstracts out the annoyances of reporting template rendering
// errors.
func render_template(w http.ResponseWriter, template_name string, params map[string]any) {
if err := templates.ExecuteTemplate(w, template_name, params); err != nil {
func renderTemplate(w http.ResponseWriter, templateName string, params map[string]any) {
if err := templates.ExecuteTemplate(w, templateName, params); err != nil {
http.Error(w, "Error rendering template: "+err.Error(), http.StatusInternalServerError) } }
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/url" "path" "strings" )
// firstLine returns s up to, but not including, the first newline;
// if s contains no newline it is returned unchanged.
func firstLine(s string) string {
	before, _, _ := strings.Cut(s, "\n")
	return before
}
// baseName returns the last element of the slash-separated path s
// (see path.Base for edge cases: "" yields ".").
func baseName(s string) string {
	return path.Base(s)
}
// pathEscape escapes s so it can be safely placed inside a URL path segment.
func pathEscape(s string) string {
	return url.PathEscape(s)
}
// queryEscape escapes s so it can be safely placed inside a URL query value.
func queryEscape(s string) string {
	return url.QueryEscape(s)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "bytes" "html" "html/template" "strings" "github.com/go-git/go-git/v5/plumbing/object" "github.com/microcosm-cc/bluemonday" "github.com/niklasfasching/go-org/org" "github.com/yuin/goldmark" "github.com/yuin/goldmark/extension" )
// markdownConverter renders Markdown with GitHub Flavored Markdown
// extensions. Its output is unsanitized; renderReadmeAtTree passes it
// through bluemonday before serving.
var markdownConverter = goldmark.New(goldmark.WithExtensions(extension.GFM))
func render_readme_at_tree(tree *object.Tree) (readme_filename string, readme_content template.HTML) {
var readme_rendered_unsafe bytes.Buffer
var readme_file *object.File
var readme_file_contents string
func renderReadmeAtTree(tree *object.Tree) (readmeFilename string, readmeRenderedSafeHTML template.HTML) {
var readmeRenderedUnsafe bytes.Buffer
var readmeFile *object.File
var readmeFileContents string
var err error
if readme_file, err = tree.File("README"); err == nil {
if readme_file_contents, err = readme_file.Contents(); err != nil {
return "Error fetching README", string_escape_html("Unable to fetch contents of README: " + err.Error())
if readmeFile, err = tree.File("README"); err == nil {
if readmeFileContents, err = readmeFile.Contents(); err != nil {
return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error())
}
return "README", template.HTML("<pre>" + html.EscapeString(readme_file_contents) + "</pre>") //#nosec G203
return "README", template.HTML("<pre>" + html.EscapeString(readmeFileContents) + "</pre>") //#nosec G203
}
if readme_file, err = tree.File("README.md"); err == nil {
if readme_file_contents, err = readme_file.Contents(); err != nil {
return "Error fetching README", string_escape_html("Unable to fetch contents of README: " + err.Error())
if readmeFile, err = tree.File("README.md"); err == nil {
if readmeFileContents, err = readmeFile.Contents(); err != nil {
return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error())
}
if err = markdown_converter.Convert([]byte(readme_file_contents), &readme_rendered_unsafe); err != nil {
return "Error fetching README", string_escape_html("Unable to render README: " + err.Error())
if err = markdownConverter.Convert([]byte(readmeFileContents), &readmeRenderedUnsafe); err != nil {
return "Error fetching README", escapeHTML("Unable to render README: " + err.Error())
}
return "README.md", template.HTML(bluemonday.UGCPolicy().SanitizeBytes(readme_rendered_unsafe.Bytes())) //#nosec G203
return "README.md", template.HTML(bluemonday.UGCPolicy().SanitizeBytes(readmeRenderedUnsafe.Bytes())) //#nosec G203
}
if readme_file, err = tree.File("README.org"); err == nil {
if readme_file_contents, err = readme_file.Contents(); err != nil {
return "Error fetching README", string_escape_html("Unable to fetch contents of README: " + err.Error())
if readmeFile, err = tree.File("README.org"); err == nil {
if readmeFileContents, err = readmeFile.Contents(); err != nil {
return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error())
}
org_html, err := org.New().Parse(strings.NewReader(readme_file_contents), readme_filename).Write(org.NewHTMLWriter())
orgHTML, err := org.New().Parse(strings.NewReader(readmeFileContents), readmeFilename).Write(org.NewHTMLWriter())
if err != nil {
return "Error fetching README", string_escape_html("Unable to render README: " + err.Error())
return "Error fetching README", escapeHTML("Unable to render README: " + err.Error())
}
return "README.org", template.HTML(bluemonday.UGCPolicy().Sanitize(org_html)) //#nosec G203
return "README.org", template.HTML(bluemonday.UGCPolicy().Sanitize(orgHTML)) //#nosec G203
} return "", "" }
// escapeHTML HTML-escapes s and marks the result as safe template.HTML.
func escapeHTML(s string) template.HTML {
	return template.HTML(html.EscapeString(s)) //#nosec G203
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "net/url" "strings" ) // We don't use path.Join because it collapses multiple slashes into one.
func generate_ssh_remote_url(group_path []string, repo_name string) string {
return strings.TrimSuffix(config.SSH.Root, "/") + "/" + path_escape_cat_segments(group_path) + "/:/repos/" + url.PathEscape(repo_name)
func genSSHRemoteURL(group_path []string, repo_name string) string {
return strings.TrimSuffix(config.SSH.Root, "/") + "/" + segmentsToURL(group_path) + "/:/repos/" + url.PathEscape(repo_name)
}
func generate_http_remote_url(group_path []string, repo_name string) string {
return strings.TrimSuffix(config.HTTP.Root, "/") + "/" + path_escape_cat_segments(group_path) + "/:/repos/" + url.PathEscape(repo_name)
func genHTTPRemoteURL(group_path []string, repo_name string) string {
return strings.TrimSuffix(config.HTTP.Root, "/") + "/" + segmentsToURL(group_path) + "/:/repos/" + url.PathEscape(repo_name)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "embed" "html/template" "io/fs" "net/http" "github.com/tdewolff/minify/v2" "github.com/tdewolff/minify/v2/html" ) // We embed all source for easy AGPL compliance. // //go:embed .gitignore .gitattributes //go:embed LICENSE README.md //go:embed *.go go.mod go.sum //go:embed *.scfg //go:embed Makefile //go:embed static/* templates/* scripts/* sql/* //go:embed hookc/*.c //go:embed vendor/*
var sourceFS embed.FS

// sourceHandler serves the embedded source tree under /:/source/
// (embedded above for easy AGPL compliance).
var sourceHandler = http.StripPrefix(
	"/:/source/",
	http.FileServer(http.FS(sourceFS)),
)
//go:embed templates/* static/* hookc/hookc
// resourcesFS embeds the templates, static assets, and hook binary used at runtime.
var resourcesFS embed.FS
// templates holds the parsed template set; populated by loadTemplates.
var templates *template.Template
// loadTemplates parses every file under the embedded templates/ directory
// into the package-level template set, minifying the HTML first (while
// keeping the {{ }} template delimiters intact) and installing the helper
// funcs the templates use.
func loadTemplates() (err error) {
	m := minify.New()
	m.Add("text/html", &html.Minifier{TemplateDelims: [2]string{"{{", "}}"}, KeepDefaultAttrVals: true})

	templates = template.New("templates").Funcs(template.FuncMap{
		"first_line": firstLine, "base_name": baseName, "path_escape": pathEscape, "query_escape": queryEscape,
	})

	err = fs.WalkDir(resourcesFS, "templates", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			return nil
		}
		content, err := fs.ReadFile(resourcesFS, path)
		if err != nil {
			return err
		}
		minified, err := m.Bytes("text/html", content)
		if err != nil {
			return err
		}
		_, err = templates.Parse(string(minified))
		return err
	})
	return err
}
// staticHandler serves the embedded static/ assets under /:/static/.
var staticHandler http.Handler

func init() {
	staticFS, err := fs.Sub(resourcesFS, "static")
	if err != nil {
		panic(err)
	}
	staticHandler = http.StripPrefix("/:/static/", http.FileServer(http.FS(staticFS)))
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "errors" "fmt" "os" "os/exec"
glider_ssh "github.com/gliderlabs/ssh"
gliderSSH "github.com/gliderlabs/ssh"
"github.com/go-git/go-git/v5"
"go.lindenii.runxiyu.org/lindenii-common/cmap"
)
type packPass struct {
session glider_ssh.Session repo *git.Repository pubkey string directAccess bool repo_path string userID int userType string repoID int group_path []string repo_name string contribReq string
session gliderSSH.Session repo *git.Repository pubkey string directAccess bool repoPath string userID int userType string repoID int groupPath []string repoName string contribReq string
}
var packPasses = cmap.Map[string, packPass]{}
// ssh_handle_receive_pack handles attempts to push to repos.
func ssh_handle_receive_pack(session glider_ssh.Session, pubkey, repo_identifier string) (err error) {
group_path, repo_name, repo_id, repo_path, direct_access, contrib_requirements, user_type, user_id, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
// sshHandleRecvPack handles attempts to push to repos.
func sshHandleRecvPack(session gliderSSH.Session, pubkey, repoIdentifier string) (err error) {
groupPath, repoName, repoID, repoPath, directAccess, contribReq, userType, userID, err := getRepoInfo2(session.Context(), repoIdentifier, pubkey)
if err != nil {
return err
}
repo, err := git.PlainOpen(repo_path)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return err
}
repo_config, err := repo.Config()
repoConf, err := repo.Config()
if err != nil {
return err
}
repo_config_core := repo_config.Raw.Section("core")
if repo_config_core == nil {
return errors.New("Repository has no core section in config")
repoConfCore := repoConf.Raw.Section("core")
if repoConfCore == nil {
return errors.New("repository has no core section in config")
}
hooksPath := repo_config_core.OptionAll("hooksPath")
hooksPath := repoConfCore.OptionAll("hooksPath")
if len(hooksPath) != 1 || hooksPath[0] != config.Hooks.Execs {
return errors.New("Repository has hooksPath set to an unexpected value")
return errors.New("repository has hooksPath set to an unexpected value")
}
if !direct_access {
switch contrib_requirements {
if !directAccess {
switch contribReq {
case "closed":
if !direct_access {
return errors.New("You need direct access to push to this repo.")
if !directAccess {
return errors.New("you need direct access to push to this repo.")
} case "registered_user":
if user_type != "registered" {
return errors.New("You need to be a registered user to push to this repo.")
if userType != "registered" {
return errors.New("you need to be a registered user to push to this repo.")
}
case "ssh_pubkey":
fallthrough
case "federated":
if pubkey == "" {
return errors.New("You need to have an SSH public key to push to this repo.")
return errors.New("you need to have an SSH public key to push to this repo.")
}
if user_type == "" {
user_id, err = add_user_ssh(session.Context(), pubkey)
if userType == "" {
userID, err = addUserSSH(session.Context(), pubkey)
if err != nil {
return err
}
fmt.Fprintln(session.Stderr(), "You are now registered as user ID", user_id) user_type = "pubkey_only"
fmt.Fprintln(session.Stderr(), "you are now registered as user ID", userID) userType = "pubkey_only"
} case "public": default:
panic("unknown contrib_requirements value " + contrib_requirements)
panic("unknown contrib_requirements value " + contribReq)
} }
cookie, err := random_urlsafe_string(16)
cookie, err := randomUrlsafeStr(16)
if err != nil {
fmt.Fprintln(session.Stderr(), "Error while generating cookie:", err)
}
packPasses.Store(cookie, packPass{
session: session, pubkey: pubkey, directAccess: direct_access, repo_path: repo_path, userID: user_id, repoID: repo_id, group_path: group_path, repo_name: repo_name, repo: repo, contribReq: contrib_requirements, userType: user_type,
session: session, pubkey: pubkey, directAccess: directAccess, repoPath: repoPath, userID: userID, repoID: repoID, groupPath: groupPath, repoName: repoName, repo: repo, contribReq: contribReq, userType: userType,
}) defer packPasses.Delete(cookie) // The Delete won't execute until proc.Wait returns unless something // horribly wrong such as a panic occurs.
proc := exec.CommandContext(session.Context(), "git-receive-pack", repo_path)
proc := exec.CommandContext(session.Context(), "git-receive-pack", repoPath)
proc.Env = append(os.Environ(),
"LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket,
"LINDENII_FORGE_HOOKS_COOKIE="+cookie,
)
proc.Stdin = session
proc.Stdout = session
proc.Stderr = session.Stderr()
if err = proc.Start(); err != nil {
fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
return err
}
err = proc.Wait()
if exitError, ok := err.(*exec.ExitError); ok {
fmt.Fprintln(session.Stderr(), "Process exited with error", exitError.ExitCode())
} else if err != nil {
fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
}
return err
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "fmt" "os" "os/exec" glider_ssh "github.com/gliderlabs/ssh" )
// ssh_handle_upload_pack handles clones/fetches. It just uses git-upload-pack
// sshHandleUploadPack handles clones/fetches. It just uses git-upload-pack
// and has no ACL checks.
func ssh_handle_upload_pack(session glider_ssh.Session, pubkey, repo_identifier string) (err error) {
var repo_path string
if _, _, _, repo_path, _, _, _, _, err = get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey); err != nil {
func sshHandleUploadPack(session glider_ssh.Session, pubkey, repoIdentifier string) (err error) {
var repoPath string
if _, _, _, repoPath, _, _, _, _, err = getRepoInfo2(session.Context(), repoIdentifier, pubkey); err != nil {
return err }
proc := exec.CommandContext(session.Context(), "git-upload-pack", repo_path)
proc := exec.CommandContext(session.Context(), "git-upload-pack", repoPath)
proc.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket)
proc.Stdin = session
proc.Stdout = session
proc.Stderr = session.Stderr()
if err = proc.Start(); err != nil {
fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
return err
}
err = proc.Wait()
if exitError, ok := err.(*exec.ExitError); ok {
fmt.Fprintln(session.Stderr(), "Process exited with error", exitError.ExitCode())
} else if err != nil {
fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
}
return err
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
package main
import (
"fmt"
"net"
"os"
"strings"
glider_ssh "github.com/gliderlabs/ssh"
"go.lindenii.runxiyu.org/lindenii-common/ansiec"
"go.lindenii.runxiyu.org/lindenii-common/clog"
go_ssh "golang.org/x/crypto/ssh"
)
var (
	// server_public_key_string is the host public key in authorized_keys
	// format, as produced by go_ssh.MarshalAuthorizedKey in serveSSH.
	server_public_key_string string
	// server_public_key_fingerprint is the SHA256 fingerprint of the host key.
	server_public_key_fingerprint string
	// server_public_key is the parsed host public key; set by serveSSH.
	server_public_key go_ssh.PublicKey
)
// serveSSH loads the host key, publishes its string/fingerprint forms into
// the package-level vars, and serves SSH on listener. All connections are
// accepted; the client's public key is passed to the per-command handlers,
// which perform any authorization themselves.
func serveSSH(listener net.Listener) error {
	var host_key_bytes []byte
	var host_key go_ssh.Signer
	var err error
	var server *glider_ssh.Server

	if host_key_bytes, err = os.ReadFile(config.SSH.Key); err != nil {
		return err
	}
	if host_key, err = go_ssh.ParsePrivateKey(host_key_bytes); err != nil {
		return err
	}

	server_public_key = host_key.PublicKey()
	server_public_key_string = string(go_ssh.MarshalAuthorizedKey(server_public_key))
	server_public_key_fingerprint = go_ssh.FingerprintSHA256(server_public_key)

	server = &glider_ssh.Server{
		Handler: func(session glider_ssh.Session) {
			// err is deliberately local: the handler runs concurrently for
			// many sessions, and sharing the outer err would be a data race.
			var err error

			client_public_key := session.PublicKey()
			var client_public_key_string string
			if client_public_key != nil {
				client_public_key_string = strings.TrimSuffix(string(go_ssh.MarshalAuthorizedKey(client_public_key)), "\n")
			}

			clog.Info("Incoming SSH: " + session.RemoteAddr().String() + " " + client_public_key_string + " " + session.RawCommand())
			fmt.Fprintln(session.Stderr(), ansiec.Blue+"Lindenii Forge "+VERSION+", source at "+strings.TrimSuffix(config.HTTP.Root, "/")+"/:/source/"+ansiec.Reset+"\r")

			cmd := session.Command()
			if len(cmd) < 2 {
				fmt.Fprintln(session.Stderr(), "Insufficient arguments\r")
				return
			}

			switch cmd[0] {
			case "git-upload-pack":
				if len(cmd) > 2 {
					fmt.Fprintln(session.Stderr(), "Too many arguments\r")
					return
				}
				err = sshHandleUploadPack(session, client_public_key_string, cmd[1])
			case "git-receive-pack":
				if len(cmd) > 2 {
					fmt.Fprintln(session.Stderr(), "Too many arguments\r")
					return
				}
				err = sshHandleRecvPack(session, client_public_key_string, cmd[1])
			default:
				fmt.Fprintln(session.Stderr(), "Unsupported command: "+cmd[0]+"\r")
				return
			}
			if err != nil {
				fmt.Fprintln(session.Stderr(), err.Error())
				return
			}
		},
		PublicKeyHandler:           func(ctx glider_ssh.Context, key glider_ssh.PublicKey) bool { return true },
		KeyboardInteractiveHandler: func(ctx glider_ssh.Context, challenge go_ssh.KeyboardInteractiveChallenge) bool { return true },
		// It is intentional that we do not check any credentials and accept all connections.
		// This allows all users to connect and clone repositories. However, the public key
		// is passed to handlers, so e.g. the push handler could check the key and reject the
		// push if it needs to.
	}

	server.AddHostKey(host_key)
	if err = server.Serve(listener); err != nil {
		clog.Fatal(1, "Serving SSH: "+err.Error())
	}
	return nil
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>
package main
import (
"context"
"errors"
"fmt"
"io"
"net/url"
"strings"
"go.lindenii.runxiyu.org/lindenii-common/ansiec"
)
var err_ssh_illegal_endpoint = errors.New("illegal endpoint during SSH access")
func get_repo_path_perms_from_ssh_path_pubkey(ctx context.Context, ssh_path, ssh_pubkey string) (group_path []string, repo_name string, repo_id int, repo_path string, direct_access bool, contrib_requirements, user_type string, user_id int, err error) {
func getRepoInfo2(ctx context.Context, ssh_path, ssh_pubkey string) (group_path []string, repo_name string, repo_id int, repo_path string, direct_access bool, contrib_requirements, user_type string, user_id int, err error) {
var segments []string
var separator_index int
var module_type, module_name string
segments = strings.Split(strings.TrimPrefix(ssh_path, "/"), "/")
for i, segment := range segments {
var err error
segments[i], err = url.PathUnescape(segment)
if err != nil {
return []string{}, "", 0, "", false, "", "", 0, err
}
}
if segments[0] == ":" {
return []string{}, "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
}
separator_index = -1
for i, part := range segments {
if part == ":" {
separator_index = i
break
}
}
if segments[len(segments)-1] == "" {
segments = segments[:len(segments)-1]
}
switch {
case separator_index == -1:
return []string{}, "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
case len(segments) <= separator_index+2:
return []string{}, "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
}
group_path = segments[:separator_index]
module_type = segments[separator_index+1]
module_name = segments[separator_index+2]
repo_name = module_name
switch module_type {
case "repos":
_1, _2, _3, _4, _5, _6, _7 := getRepoInfo(ctx, group_path, module_name, ssh_pubkey)
return group_path, repo_name, _1, _2, _3, _4, _5, _6, _7
default:
return []string{}, "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
}
}
// writeRedError formats the message per format/args and writes it to w
// wrapped in ANSI red escape codes, followed by a newline.
func writeRedError(w io.Writer, format string, args ...any) {
	msg := fmt.Sprintf(format, args...)
	fmt.Fprintln(w, ansiec.Red+msg+ansiec.Reset)
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "errors" "net/http" "net/url" "strings" ) var (
err_duplicate_ref_spec = errors.New("duplicate ref spec")
err_no_ref_spec = errors.New("no ref spec")
errDupRefSpec = errors.New("duplicate ref spec")
errNoRefSpec = errors.New("no ref spec")
)
func get_param_ref_and_type(r *http.Request) (ref_type, ref string, err error) {
func getParamRefTypeName(r *http.Request) (retRefType, retRefName string, err error) {
qr := r.URL.RawQuery
q, err := url.ParseQuery(qr)
if err != nil {
return
}
done := false
for _, _ref_type := range []string{"commit", "branch", "tag"} {
_ref, ok := q[_ref_type]
for _, refType := range []string{"commit", "branch", "tag"} {
refName, ok := q[refType]
if ok {
if done {
err = err_duplicate_ref_spec
err = errDupRefSpec
return
} else {
done = true
if len(_ref) != 1 {
err = err_duplicate_ref_spec
return
}
ref = _ref[0]
ref_type = _ref_type
}
done = true
if len(refName) != 1 {
err = errDupRefSpec
return
}
retRefName = refName[0]
retRefType = refType
}
}
if !done {
err = err_no_ref_spec
err = errNoRefSpec
} return }
// parseReqURI splits a raw request URI into its path-unescaped segments and
// its parsed query parameters.
func parseReqURI(requestURI string) (segments []string, params url.Values, err error) {
	path, paramsStr, _ := strings.Cut(requestURI, "?")

	segments = strings.Split(strings.TrimPrefix(path, "/"), "/")
	for i, segment := range segments {
		segments[i], err = url.PathUnescape(segment)
		if err != nil {
			return
		}
	}

	params, err = url.ParseQuery(paramsStr)
	return
}
// redirectDir issues a See Other redirect appending a trailing slash to the
// request path (preserving any query/fragment suffix) when the path does not
// already end in one. Reports whether a redirect was issued.
func redirectDir(w http.ResponseWriter, r *http.Request) bool {
	requestURI := r.RequestURI

	pathEnd := strings.IndexAny(requestURI, "?#")
	var path, rest string
	if pathEnd == -1 {
		path = requestURI
	} else {
		path = requestURI[:pathEnd]
		rest = requestURI[pathEnd:]
	}

	if !strings.HasSuffix(path, "/") {
		http.Redirect(w, r, path+"/"+rest, http.StatusSeeOther)
		return true
	}
	return false
}
// redirectNoDir issues a See Other redirect stripping a trailing slash from
// the request path (preserving any query/fragment suffix) when the path ends
// in one. Reports whether a redirect was issued.
func redirectNoDir(w http.ResponseWriter, r *http.Request) bool {
	requestURI := r.RequestURI

	pathEnd := strings.IndexAny(requestURI, "?#")
	var path, rest string
	if pathEnd == -1 {
		path = requestURI
	} else {
		path = requestURI[:pathEnd]
		rest = requestURI[pathEnd:]
	}

	if strings.HasSuffix(path, "/") {
		http.Redirect(w, r, strings.TrimSuffix(path, "/")+rest, http.StatusSeeOther)
		return true
	}
	return false
}
// redirectUnconditionally issues a See Other redirect to the request's own
// URI, with the path and query/fragment recombined unchanged.
func redirectUnconditionally(w http.ResponseWriter, r *http.Request) {
	requestURI := r.RequestURI

	pathEnd := strings.IndexAny(requestURI, "?#")
	var path, rest string
	if pathEnd == -1 {
		path = requestURI
	} else {
		path = requestURI[:pathEnd]
		rest = requestURI[pathEnd:]
	}

	http.Redirect(w, r, path+rest, http.StatusSeeOther)
}
// segmentsToURL path-escapes each segment and joins them with "/".
// NOTE(review): the escaping happens in place, mutating the caller's slice;
// callers must not rely on the original contents afterwards.
func segmentsToURL(segments []string) string {
	for i, segment := range segments {
		segments[i] = url.PathEscape(segment)
	}
	return strings.Join(segments, "/")
}
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-FileContributor: Runxi Yu <https://runxiyu.org> package main import ( "context" "github.com/jackc/pgx/v5" )
func add_user_ssh(ctx context.Context, pubkey string) (user_id int, err error) {
func addUserSSH(ctx context.Context, pubkey string) (user_id int, err error) {
var tx pgx.Tx
if tx, err = database.Begin(ctx); err != nil {
return
}
defer func() {
_ = tx.Rollback(ctx)
}()
if err = tx.QueryRow(ctx, `INSERT INTO users (type) VALUES ('pubkey_only') RETURNING id`).Scan(&user_id); err != nil {
return
}
if _, err = tx.Exec(ctx, `INSERT INTO ssh_public_keys (key_string, user_id) VALUES ($1, $2)`, pubkey, user_id); err != nil {
return
}
err = tx.Commit(ctx)
return
}