Lindenii Project Forge
Commit info | |
---|---|
ID | f947e4d7e72c45e9a4199ed191a4396db2ac3c60 |
Author | Runxi Yu <me@runxiyu.org> |
Author date | Thu, 20 Feb 2025 11:32:52 +0800 |
Committer | Runxi Yu <me@runxiyu.org> |
Committer date | Thu, 20 Feb 2025 11:32:52 +0800 |
hooks, ssh: Indicate URL of newly-created MRs
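This commit makes the pre-receive hook tell the pusher where to find a merge request it has just created: the INSERT now uses RETURNING id, and the hook prints a link built from the repository's group and name, which are threaded through from the SSH layer for that purpose. Below is a minimal sketch of the query pattern, assuming a pgx connection pool and an illustrative base URL; reportNewMR and baseURL are hypothetical names, and the forge's own generate_http_remote_url helper is not shown in this diff. The full diff follows.

// Sketch: insert a merge request and read back its generated id with
// RETURNING, then build a URL to report to the pusher. The table layout
// mirrors the query in the diff below; the pool setup and base URL are
// illustrative assumptions, not part of the forge code.
package main

import (
	"context"
	"fmt"
	"strconv"

	"github.com/jackc/pgx/v5/pgxpool"
)

func reportNewMR(ctx context.Context, db *pgxpool.Pool, repoID, userID int, sourceRef, baseURL string) (string, error) {
	var newID int
	err := db.QueryRow(ctx,
		"INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
		repoID, userID, sourceRef,
	).Scan(&newID)
	if err != nil {
		return "", err
	}
	// e.g. https://forge.example.org/group/:/repos/name/contrib/42/
	return baseURL + "/contrib/" + strconv.FormatUint(uint64(newID), 10) + "/", nil
}

func main() {
	fmt.Println("see reportNewMR; wire it to a real pgxpool.Pool to use")
}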
package main

import (
	"bytes"
	"context"
	"encoding/binary"
	"errors"
	"fmt"
	"io"
	"net"
	"os"
	"path/filepath"
+	"strconv"
	"strings"
	"syscall"

	"github.com/jackc/pgx/v5"
	"github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
	"go.lindenii.runxiyu.org/lindenii-common/ansiec"
)

var (
	err_get_fd    = errors.New("unable to get file descriptor")
	err_get_ucred = errors.New("failed getsockopt")
)

// hooks_handle_connection handles a connection from git_hooks_client via the
// unix socket.
func hooks_handle_connection(conn net.Conn) {
	defer conn.Close()

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// There aren't reasonable cases where someone would run this as
	// another user.
	ucred, err := get_ucred(conn)
	if err != nil {
		if _, err := conn.Write([]byte{1}); err != nil {
			return
		}
		wf_error(conn, "\nUnable to get peer credentials: %v", err)
		return
	}
	if ucred.Uid != uint32(os.Getuid()) {
		if _, err := conn.Write([]byte{1}); err != nil {
			return
		}
		wf_error(conn, "\nUID mismatch")
		return
	}

	cookie := make([]byte, 64)
	_, err = conn.Read(cookie)
	if err != nil {
		if _, err := conn.Write([]byte{1}); err != nil {
			return
		}
		wf_error(conn, "\nFailed to read cookie: %v", err)
		return
	}

	pack_to_hook, ok := pack_to_hook_by_cookie.Load(string(cookie))
	if !ok {
		if _, err := conn.Write([]byte{1}); err != nil {
			return
		}
		wf_error(conn, "\nInvalid handler cookie")
		return
	}

	ssh_stderr := pack_to_hook.session.Stderr()
	ssh_stderr.Write([]byte{'\n'})

	hook_return_value := func() byte {
		var argc64 uint64
		err = binary.Read(conn, binary.NativeEndian, &argc64)
		if err != nil {
			wf_error(ssh_stderr, "Failed to read argc: %v", err)
			return 1
		}
		var args []string
		for i := uint64(0); i < argc64; i++ {
			var arg bytes.Buffer
			for {
				b := make([]byte, 1)
				n, err := conn.Read(b)
				if err != nil || n != 1 {
					wf_error(ssh_stderr, "Failed to read arg: %v", err)
					return 1
				}
				if b[0] == 0 {
					break
				}
				arg.WriteByte(b[0])
			}
			args = append(args, arg.String())
		}

		var stdin bytes.Buffer
		_, err = io.Copy(&stdin, conn)
		if err != nil {
			wf_error(conn, "Failed to read to the stdin buffer: %v", err)
		}

		switch filepath.Base(args[0]) {
		case "pre-receive":
			if pack_to_hook.direct_access {
				return 0
			} else {
				all_ok := true
				for {
					line, err := stdin.ReadString('\n')
					if errors.Is(err, io.EOF) {
						break
					}
					line = line[:len(line)-1]

					old_oid, rest, found := strings.Cut(line, " ")
					if !found {
						wf_error(ssh_stderr, "Invalid pre-receive line: %v", line)
						return 1
					}
					new_oid, ref_name, found := strings.Cut(rest, " ")
					if !found {
						wf_error(ssh_stderr, "Invalid pre-receive line: %v", line)
						return 1
					}

					if strings.HasPrefix(ref_name, "refs/heads/contrib/") {
						if all_zero_num_string(old_oid) { // New branch
							fmt.Fprintln(ssh_stderr, ansiec.Blue + "POK" + ansiec.Reset, ref_name)
-							_, err = database.Exec(ctx, "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open')",
+							var new_mr_id int
+							err = database.QueryRow(ctx, "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
								pack_to_hook.repo_id, pack_to_hook.user_id, strings.TrimPrefix(ref_name, "refs/heads/"),
-							)
+							).Scan(&new_mr_id)
							if err != nil {
								wf_error(ssh_stderr, "Error creating merge request: %v", err)
								return 1
							}
+							fmt.Fprintln(ssh_stderr, ansiec.Blue + "Created merge request at", generate_http_remote_url(pack_to_hook.group_name, pack_to_hook.repo_name) + "/contrib/" + strconv.FormatUint(uint64(new_mr_id), 10) + "/" + ansiec.Reset)
						} else { // Existing contrib branch
							var existing_merge_request_user_id int
							err = database.QueryRow(ctx,
								"SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
								strings.TrimPrefix(ref_name, "refs/heads/"), pack_to_hook.repo_id,
							).Scan(&existing_merge_request_user_id)
							if err != nil {
								if errors.Is(err, pgx.ErrNoRows) {
									wf_error(ssh_stderr, "No existing merge request for existing contrib branch: %v", err)
								} else {
									wf_error(ssh_stderr, "Error querying for existing merge request: %v", err)
								}
								return 1
							}
							if existing_merge_request_user_id == 0 {
								all_ok = false
								fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(branch belongs to unowned MR)")
								continue
							}
							if existing_merge_request_user_id != pack_to_hook.user_id {
								all_ok = false
								fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(branch belongs another user's MR)")
								continue
							}

							repo, err := git.PlainOpen(pack_to_hook.repo_path)
							if err != nil {
								wf_error(ssh_stderr, "Daemon failed to open repo: %v", err)
								return 1
							}

							old_hash := plumbing.NewHash(old_oid)
							old_commit, err := repo.CommitObject(old_hash)
							if err != nil {
								wf_error(ssh_stderr, "Daemon failed to get old commit: %v", err)
								return 1
							}

							// Potential BUG: I'm not sure if new_commit is guaranteed to be
							// detectable as they haven't been merged into the main repo's
							// objects yet. But it seems to work, and I don't think there's
							// any reason for this to only work intermitently.
							new_hash := plumbing.NewHash(new_oid)
							new_commit, err := repo.CommitObject(new_hash)
							if err != nil {
								wf_error(ssh_stderr, "Daemon failed to get new commit: %v", err)
								return 1
							}

							is_ancestor, err := old_commit.IsAncestor(new_commit)
							if err != nil {
								wf_error(ssh_stderr, "Daemon failed to check if old commit is ancestor: %v", err)
								return 1
							}

							if !is_ancestor {
								// TODO: Create MR snapshot ref instead
								all_ok = false
								fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(force pushes are not supported yet)")
								continue
							}

							fmt.Fprintln(ssh_stderr, ansiec.Blue + "POK" + ansiec.Reset, ref_name)
						}
					} else { // Non-contrib branch
						all_ok = false
						fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(you cannot push to branches outside of contrib/*)")
					}
				}

				fmt.Fprintln(ssh_stderr)
				if all_ok {
					fmt.Fprintln(ssh_stderr, "Overall " + ansiec.Green + "ACK" + ansiec.Reset + " (all checks passed)")
					return 0
				} else {
					fmt.Fprintln(ssh_stderr, "Overall " + ansiec.Red + "NAK" + ansiec.Reset + " (one or more branches failed checks)")
					return 1
				}
			}
		default:
			fmt.Fprintln(ssh_stderr, ansiec.Red + "Invalid hook:", args[0] + ansiec.Reset)
			return 1
		}
	}()

	fmt.Fprintln(ssh_stderr)

	_, _ = conn.Write([]byte{hook_return_value})
}

func serve_git_hooks(listener net.Listener) error {
	for {
		conn, err := listener.Accept()
		if err != nil {
			return err
		}
		go hooks_handle_connection(conn)
	}
}

func get_ucred(conn net.Conn) (*syscall.Ucred, error) {
	unix_conn := conn.(*net.UnixConn)
	fd, err := unix_conn.File()
	if err != nil {
		return nil, err_get_fd
	}
	defer fd.Close()

	ucred, err := syscall.GetsockoptUcred(int(fd.Fd()), syscall.SOL_SOCKET, syscall.SO_PEERCRED)
	if err != nil {
		return nil, err_get_ucred
	}
	return ucred, nil
}

func all_zero_num_string(s string) bool {
	for _, r := range s {
		if r != '0' {
			return false
		}
	}
	return true
}
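The existing-branch path above decides whether a push is a fast-forward by walking commit ancestry with go-git (git.PlainOpen, CommitObject, IsAncestor). Here is a self-contained sketch of that check in isolation, taking a repository path and two full commit hashes on the command line; it illustrates the library calls and is not forge code.

// Sketch: check whether old-oid..new-oid is a fast-forward using go-git,
// the same calls the pre-receive handler above makes.
package main

import (
	"fmt"
	"os"

	"github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
)

func main() {
	if len(os.Args) != 4 {
		fmt.Fprintln(os.Stderr, "usage: ffcheck <repo-path> <old-oid> <new-oid>")
		os.Exit(2)
	}
	repo, err := git.PlainOpen(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, "open:", err)
		os.Exit(1)
	}
	oldCommit, err := repo.CommitObject(plumbing.NewHash(os.Args[2]))
	if err != nil {
		fmt.Fprintln(os.Stderr, "old commit:", err)
		os.Exit(1)
	}
	newCommit, err := repo.CommitObject(plumbing.NewHash(os.Args[3]))
	if err != nil {
		fmt.Fprintln(os.Stderr, "new commit:", err)
		os.Exit(1)
	}
	// IsAncestor reports whether oldCommit is an ancestor of newCommit,
	// i.e. whether the update is a fast-forward.
	ff, err := oldCommit.IsAncestor(newCommit)
	if err != nil {
		fmt.Fprintln(os.Stderr, "ancestry check:", err)
		os.Exit(1)
	}
	fmt.Println("fast-forward:", ff)
}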
package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"

	glider_ssh "github.com/gliderlabs/ssh"
	"go.lindenii.runxiyu.org/lindenii-common/cmap"
)

type pack_to_hook_t struct {
	session       glider_ssh.Session
	pubkey        string
	direct_access bool
	repo_path     string
	user_id       int
	repo_id       int
+	group_name    string
+	repo_name     string
}

var pack_to_hook_by_cookie = cmap.Map[string, pack_to_hook_t]{}

// ssh_handle_receive_pack handles attempts to push to repos.
func ssh_handle_receive_pack(session glider_ssh.Session, pubkey string, repo_identifier string) (err error) {
-	repo_id, repo_path, direct_access, contrib_requirements, user_type, user_id, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
+	group_name, repo_name, repo_id, repo_path, direct_access, contrib_requirements, user_type, user_id, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
	if err != nil {
		return err
	}
	if !direct_access {
		switch contrib_requirements {
		case "closed":
			if !direct_access {
				return errors.New("You need direct access to push to this repo.")
			}
		case "registered_user":
			if user_type != "registered" {
				return errors.New("You need to be a registered user to push to this repo.")
			}
		case "ssh_pubkey":
			if pubkey == "" {
				return errors.New("You need to have an SSH public key to push to this repo.")
			}
			if user_type == "" {
				user_id, err = add_user_ssh(session.Context(), pubkey)
				if err != nil {
					return err
				}
				fmt.Fprintln(session.Stderr(), "You are now registered as user ID", user_id)
			}
		case "public":
		default:
			panic("unknown contrib_requirements value " + contrib_requirements)
		}
	}

	cookie, err := random_urlsafe_string(16)
	if err != nil {
		fmt.Fprintln(session.Stderr(), "Error while generating cookie:", err)
	}

+	fmt.Println(group_name, repo_name)
	pack_to_hook_by_cookie.Store(cookie, pack_to_hook_t{
		session:       session,
		pubkey:        pubkey,
		direct_access: direct_access,
		repo_path:     repo_path,
		user_id:       user_id,
		repo_id:       repo_id,
+		group_name:    group_name,
+		repo_name:     repo_name,
	})
	defer pack_to_hook_by_cookie.Delete(cookie)
	// The Delete won't execute until proc.Wait returns unless something
	// horribly wrong such as a panic occurs.

	proc := exec.CommandContext(session.Context(), "git-receive-pack", repo_path)
	proc.Env = append(os.Environ(),
		"LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket,
		"LINDENII_FORGE_HOOKS_COOKIE="+cookie,
	)
	proc.Stdin = session
	proc.Stdout = session
	proc.Stderr = session.Stderr()

	err = proc.Start()
	if err != nil {
		fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
		return err
	}
	err = proc.Wait()
	if exitError, ok := err.(*exec.ExitError); ok {
		fmt.Fprintln(session.Stderr(), "Process exited with error", exitError.ExitCode())
	} else if err != nil {
		fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
	}

	return err
}
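random_urlsafe_string is defined elsewhere in the forge and is not part of this commit. One plausible shape for such a helper, sketched with crypto/rand and URL-safe base64, is shown below; the name, output length, and encoding here are assumptions, and the real helper must satisfy whatever length the hook server above expects when it reads the cookie into a 64-byte buffer.

// Sketch: generate a URL-safe random string, e.g. for a hook cookie.
// Hypothetical stand-in; not the forge's actual random_urlsafe_string.
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

func randomURLSafeString(sz int) (string, error) {
	buf := make([]byte, sz)
	if _, err := rand.Read(buf); err != nil {
		return "", err
	}
	// RawURLEncoding avoids '+', '/', and padding, so the result is safe
	// to embed in URLs and environment variables.
	return base64.RawURLEncoding.EncodeToString(buf), nil
}

func main() {
	s, err := randomURLSafeString(16)
	if err != nil {
		panic(err)
	}
	fmt.Println(s)
}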
package main

import (
	"fmt"
	"os"
	"os/exec"

	glider_ssh "github.com/gliderlabs/ssh"
)

// ssh_handle_upload_pack handles clones/fetches. It just uses git-upload-pack
// and has no ACL checks.
func ssh_handle_upload_pack(session glider_ssh.Session, pubkey string, repo_identifier string) (err error) {
-	_, repo_path, _, _, _, _, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
+	_, _, _, repo_path, _, _, _, _, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
	if err != nil {
		return err
	}

	proc := exec.CommandContext(session.Context(), "git-upload-pack", repo_path)
	proc.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket)
	proc.Stdin = session
	proc.Stdout = session
	proc.Stderr = session.Stderr()

	err = proc.Start()
	if err != nil {
		fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
		return err
	}
	err = proc.Wait()
	if exitError, ok := err.(*exec.ExitError); ok {
		fmt.Fprintln(session.Stderr(), "Process exited with error", exitError.ExitCode())
	} else if err != nil {
		fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
	}

	return err
}
package main

import (
	"context"
	"errors"
	"fmt"
	"io"
	"net/url"
	"strings"

	"go.lindenii.runxiyu.org/lindenii-common/ansiec"
)

var err_ssh_illegal_endpoint = errors.New("illegal endpoint during SSH access")

-func get_repo_path_perms_from_ssh_path_pubkey(ctx context.Context, ssh_path string, ssh_pubkey string) (repo_id int, repo_path string, direct_access bool, contrib_requirements string, user_type string, user_id int, err error) {
+func get_repo_path_perms_from_ssh_path_pubkey(ctx context.Context, ssh_path string, ssh_pubkey string) (group_name string, repo_name string, repo_id int, repo_path string, direct_access bool, contrib_requirements string, user_type string, user_id int, err error) {
	segments := strings.Split(strings.TrimPrefix(ssh_path, "/"), "/")

	for i, segment := range segments {
		var err error
		segments[i], err = url.PathUnescape(segment)
		if err != nil {
-			return 0, "", false, "", "", 0, err
+			return "", "", 0, "", false, "", "", 0, err
		}
	}

	if segments[0] == ":" {
-		return 0, "", false, "", "", 0, err_ssh_illegal_endpoint
+		return "", "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
	}

	separator_index := -1
	for i, part := range segments {
		if part == ":" {
			separator_index = i
			break
		}
	}
	if segments[len(segments)-1] == "" {
		segments = segments[:len(segments)-1]
	}

	switch {
	case separator_index == -1:
-		return 0, "", false, "", "", 0, err_ssh_illegal_endpoint
+		return "", "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
	case len(segments) <= separator_index+2:
-		return 0, "", false, "", "", 0, err_ssh_illegal_endpoint
+		return "", "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
	}

-	group_name := segments[0]
+	group_name = segments[0]
	module_type := segments[separator_index+1]
	module_name := segments[separator_index+2]
+	repo_name = module_name

	switch module_type {
	case "repos":
-		return get_path_perm_by_group_repo_key(ctx, group_name, module_name, ssh_pubkey)
+		_1, _2, _3, _4, _5, _6, _7 := get_path_perm_by_group_repo_key(ctx, group_name, module_name, ssh_pubkey)
+		return group_name, repo_name, _1, _2, _3, _4, _5, _6, _7
	default:
-		return 0, "", false, "", "", 0, err_ssh_illegal_endpoint
+		return "", "", 0, "", false, "", "", 0, err_ssh_illegal_endpoint
	}
}

func wf_error(w io.Writer, format string, args ...any) {
	fmt.Fprintln(w, ansiec.Red + fmt.Sprintf(format, args...) + ansiec.Reset)
}
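get_repo_path_perms_from_ssh_path_pubkey parses SSH paths that put a literal ":" segment between the group hierarchy and the module, e.g. /<group>/:/repos/<name>. The sketch below isolates just that splitting step, using a hypothetical example path; it keeps all leading segments as the group part (the function above currently uses only segments[0]) and skips unescaping and permission lookups entirely.

// Sketch: split an SSH path of the form /<group...>/:/repos/<name> into its
// group segments and repository name, mirroring the convention parsed above.
// Simplified illustration only: no database lookup, no permission checks.
package main

import (
	"errors"
	"fmt"
	"strings"
)

func splitSSHPath(p string) (group []string, repo string, err error) {
	segments := strings.Split(strings.TrimPrefix(p, "/"), "/")
	sep := -1
	for i, s := range segments {
		if s == ":" {
			sep = i
			break
		}
	}
	// Require at least one group segment, the ":" separator, a "repos"
	// module type, and a repository name after it.
	if sep < 1 || len(segments) <= sep+2 || segments[sep+1] != "repos" {
		return nil, "", errors.New("illegal endpoint")
	}
	return segments[:sep], segments[sep+2], nil
}

func main() {
	group, repo, err := splitSSHPath("/lindenii/forge/:/repos/server")
	if err != nil {
		panic(err)
	}
	fmt.Println(group, repo) // [lindenii forge] server
}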