Hi… I am well aware that this diff view is very suboptimal. It will be fixed when the refactored server comes along!
ssh/recv: Check hooksPath before receiving packs
package main import ( "bytes" "context" "encoding/binary" "errors" "fmt" "io" "net" "os" "path/filepath" "strconv" "strings" "syscall"
"github.com/go-git/go-git/v5/plumbing"
"github.com/jackc/pgx/v5"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing"
"go.lindenii.runxiyu.org/lindenii-common/ansiec"
)
var (
err_get_fd = errors.New("unable to get file descriptor")
err_get_ucred = errors.New("failed getsockopt")
)
// hooks_handle_connection handles a connection from git_hooks_client via the
// unix socket.
func hooks_handle_connection(conn net.Conn) {
defer conn.Close()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
// There aren't reasonable cases where someone would run this as
// another user.
ucred, err := get_ucred(conn)
if err != nil {
if _, err := conn.Write([]byte{1}); err != nil {
return
}
wf_error(conn, "\nUnable to get peer credentials: %v", err)
return
}
if ucred.Uid != uint32(os.Getuid()) {
if _, err := conn.Write([]byte{1}); err != nil {
return
}
wf_error(conn, "\nUID mismatch")
return
}
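// The first 64 bytes of the connection are an opaque cookie that maps
// this connection back to the in-flight git-receive-pack session
// registered in pack_to_hook_by_cookie.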
cookie := make([]byte, 64)
_, err = io.ReadFull(conn, cookie)
if err != nil {
if _, err := conn.Write([]byte{1}); err != nil {
return
}
wf_error(conn, "\nFailed to read cookie: %v", err)
return
}
pack_to_hook, ok := pack_to_hook_by_cookie.Load(string(cookie))
if !ok {
if _, err := conn.Write([]byte{1}); err != nil {
return
}
wf_error(conn, "\nInvalid handler cookie")
return
}
ssh_stderr := pack_to_hook.session.Stderr()
ssh_stderr.Write([]byte{'\n'})
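// The rest of the stream is the hook's view of the wire protocol: a
// native-endian uint64 argument count, then that many NUL-terminated
// arguments, then the hook's standard input until EOF. The closure's
// return value becomes the single status byte written back to the client.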
hook_return_value := func() byte {
var argc64 uint64
err = binary.Read(conn, binary.NativeEndian, &argc64)
if err != nil {
wf_error(ssh_stderr, "Failed to read argc: %v", err)
return 1
}
var args []string
for i := uint64(0); i < argc64; i++ {
var arg bytes.Buffer
for {
b := make([]byte, 1)
n, err := conn.Read(b)
if err != nil || n != 1 {
wf_error(ssh_stderr, "Failed to read arg: %v", err)
return 1
}
if b[0] == 0 {
break
}
arg.WriteByte(b[0])
}
args = append(args, arg.String())
}
var stdin bytes.Buffer
_, err = io.Copy(&stdin, conn)
if err != nil {
wf_error(conn, "Failed to read to the stdin buffer: %v", err)
}
if len(args) == 0 {
wf_error(ssh_stderr, "No hook arguments were received")
return 1
}
switch filepath.Base(args[0]) {
case "pre-receive":
if pack_to_hook.direct_access {
return 0
} else {
all_ok := true
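// Each line of a pre-receive hook's stdin is "<old-oid> <new-oid> <ref-name>".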
for {
line, err := stdin.ReadString('\n')
if errors.Is(err, io.EOF) {
break
}
line = line[:len(line)-1]
old_oid, rest, found := strings.Cut(line, " ")
if !found {
wf_error(ssh_stderr, "Invalid pre-receive line: %v", line)
return 1
}
new_oid, ref_name, found := strings.Cut(rest, " ")
if !found {
wf_error(ssh_stderr, "Invalid pre-receive line: %v", line)
return 1
}
if strings.HasPrefix(ref_name, "refs/heads/contrib/") {
if all_zero_num_string(old_oid) { // New branch
fmt.Fprintln(ssh_stderr, ansiec.Blue + "POK" + ansiec.Reset, ref_name)
fmt.Fprintln(ssh_stderr, ansiec.Blue+"POK"+ansiec.Reset, ref_name)
var new_mr_id int
err = database.QueryRow(ctx,
"INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
pack_to_hook.repo_id, pack_to_hook.user_id, strings.TrimPrefix(ref_name, "refs/heads/"),
).Scan(&new_mr_id)
if err != nil {
wf_error(ssh_stderr, "Error creating merge request: %v", err)
return 1
}
fmt.Fprintln(ssh_stderr, ansiec.Blue + "Created merge request at", generate_http_remote_url(pack_to_hook.group_name, pack_to_hook.repo_name) + "/contrib/" + strconv.FormatUint(uint64(new_mr_id), 10) + "/" + ansiec.Reset)
fmt.Fprintln(ssh_stderr, ansiec.Blue+"Created merge request at", generate_http_remote_url(pack_to_hook.group_name, pack_to_hook.repo_name)+"/contrib/"+strconv.FormatUint(uint64(new_mr_id), 10)+"/"+ansiec.Reset)
} else { // Existing contrib branch
var existing_merge_request_user_id int
err = database.QueryRow(ctx,
"SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
strings.TrimPrefix(ref_name, "refs/heads/"), pack_to_hook.repo_id,
).Scan(&existing_merge_request_user_id)
if err != nil {
if errors.Is(err, pgx.ErrNoRows) {
wf_error(ssh_stderr, "No existing merge request for existing contrib branch: %v", err)
} else {
wf_error(ssh_stderr, "Error querying for existing merge request: %v", err)
}
return 1
}
if existing_merge_request_user_id == 0 {
all_ok = false
fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(branch belongs to unowned MR)")
fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(branch belongs to unowned MR)")
continue
}
if existing_merge_request_user_id != pack_to_hook.user_id {
all_ok = false
fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(branch belongs another user's MR)")
fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(branch belongs another user's MR)")
continue }
old_hash := plumbing.NewHash(old_oid)
old_commit, err := pack_to_hook.repo.CommitObject(old_hash)
if err != nil {
wf_error(ssh_stderr, "Daemon failed to get old commit: %v", err)
return 1
}
// Potential BUG: I'm not sure if new_commit is guaranteed to be
// detectable here, as the pushed objects haven't been merged into the
// main repo's object store yet. But it seems to work, and I don't
// think there's any reason for this to only work intermittently.
new_hash := plumbing.NewHash(new_oid)
new_commit, err := pack_to_hook.repo.CommitObject(new_hash)
if err != nil {
wf_error(ssh_stderr, "Daemon failed to get new commit: %v", err)
return 1
}
is_ancestor, err := old_commit.IsAncestor(new_commit)
if err != nil {
wf_error(ssh_stderr, "Daemon failed to check if old commit is ancestor: %v", err)
return 1
}
if !is_ancestor {
// TODO: Create MR snapshot ref instead
all_ok = false
fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(force pushes are not supported yet)")
fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(force pushes are not supported yet)")
continue }
fmt.Fprintln(ssh_stderr, ansiec.Blue + "POK" + ansiec.Reset, ref_name)
fmt.Fprintln(ssh_stderr, ansiec.Blue+"POK"+ansiec.Reset, ref_name)
}
} else { // Non-contrib branch
all_ok = false
fmt.Fprintln(ssh_stderr, ansiec.Red + "NAK" + ansiec.Reset, ref_name, "(you cannot push to branches outside of contrib/*)")
fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(you cannot push to branches outside of contrib/*)")
}
}
fmt.Fprintln(ssh_stderr)
if all_ok {
fmt.Fprintln(ssh_stderr, "Overall " + ansiec.Green + "ACK" + ansiec.Reset + " (all checks passed)")
fmt.Fprintln(ssh_stderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
return 0
} else {
fmt.Fprintln(ssh_stderr, "Overall " + ansiec.Red + "NAK" + ansiec.Reset + " (one or more branches failed checks)")
fmt.Fprintln(ssh_stderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
return 1 } } default:
fmt.Fprintln(ssh_stderr, ansiec.Red + "Invalid hook:", args[0] + ansiec.Reset)
fmt.Fprintln(ssh_stderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
return 1
}
}()
fmt.Fprintln(ssh_stderr)
_, _ = conn.Write([]byte{hook_return_value})
}
func serve_git_hooks(listener net.Listener) error {
for {
conn, err := listener.Accept()
if err != nil {
return err
}
go hooks_handle_connection(conn)
}
}
func get_ucred(conn net.Conn) (*syscall.Ucred, error) {
unix_conn := conn.(*net.UnixConn)
fd, err := unix_conn.File()
if err != nil {
return nil, err_get_fd
}
defer fd.Close()
ucred, err := syscall.GetsockoptUcred(int(fd.Fd()), syscall.SOL_SOCKET, syscall.SO_PEERCRED)
if err != nil {
return nil, err_get_ucred
}
return ucred, nil
}
func all_zero_num_string(s string) bool {
for _, r := range s {
if r != '0' {
return false
}
}
return true
}
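For context, the daemon above expects its peer, git_hooks_client (not shown in this diff), to follow the protocol the comments describe: a 64-byte cookie field, a native-endian argument count, NUL-terminated arguments, the hook's stdin until the write side is closed, and finally a single status byte in reply. The sketch below is reconstructed from the server-side parsing only; the fixed-width cookie handling, the use of os.Args, and the function layout are assumptions, not code from this repository.

package main

import (
	"encoding/binary"
	"fmt"
	"io"
	"net"
	"os"
)

// Hypothetical sketch of a hooks client; illustrative only.
func main() {
	// Dial the socket the daemon advertised via the environment.
	conn, err := net.Dial("unix", os.Getenv("LINDENII_FORGE_HOOKS_SOCKET_PATH"))
	if err != nil {
		fmt.Fprintln(os.Stderr, "hooks client:", err)
		os.Exit(1)
	}
	defer conn.Close()

	// The daemon reads the cookie as a fixed 64-byte field.
	cookie := make([]byte, 64)
	copy(cookie, os.Getenv("LINDENII_FORGE_HOOKS_COOKIE"))
	if _, err := conn.Write(cookie); err != nil {
		os.Exit(1)
	}

	// argc as a native-endian uint64, then each argument NUL-terminated.
	if err := binary.Write(conn, binary.NativeEndian, uint64(len(os.Args))); err != nil {
		os.Exit(1)
	}
	for _, arg := range os.Args {
		if _, err := conn.Write(append([]byte(arg), 0)); err != nil {
			os.Exit(1)
		}
	}

	// The hook's stdin is streamed until EOF, so half-close the write side
	// once it has been copied; only then will the daemon reply.
	if _, err := io.Copy(conn, os.Stdin); err != nil {
		os.Exit(1)
	}
	if uc, ok := conn.(*net.UnixConn); ok {
		_ = uc.CloseWrite()
	}

	// The reply is a single status byte; reuse it as the exit code.
	status := make([]byte, 1)
	if _, err := io.ReadFull(conn, status); err != nil {
		os.Exit(1)
	}
	os.Exit(int(status[0]))
}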
package main import ( "errors" "fmt" "os" "os/exec" glider_ssh "github.com/gliderlabs/ssh"
"github.com/go-git/go-git/v5"
"go.lindenii.runxiyu.org/lindenii-common/cmap"
)
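// pack_to_hook_t carries the state of one in-flight git-receive-pack
// session so that hooks_handle_connection can look it up by cookie.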
type pack_to_hook_t struct {
session glider_ssh.Session
repo *git.Repository
pubkey string
direct_access bool
repo_path string
user_id int
repo_id int
group_name string
repo_name string
}
var pack_to_hook_by_cookie = cmap.Map[string, pack_to_hook_t]{}
// ssh_handle_receive_pack handles attempts to push to repos.
func ssh_handle_receive_pack(session glider_ssh.Session, pubkey string, repo_identifier string) (err error) {
group_name, repo_name, repo_id, repo_path, direct_access, contrib_requirements, user_type, user_id, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
if err != nil {
return err
}
repo, err := git.PlainOpen(repo_path)
if err != nil {
return err
}
repo_config, err := repo.Config()
if err != nil {
return err
}
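// Refuse to receive packs unless the repository's core.hooksPath points
// at our hook executables; otherwise git-receive-pack would run without
// the checks enforced by the hooks daemon.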
repo_config_core := repo_config.Raw.Section("core")
if repo_config_core == nil {
return errors.New("Repository has no core section in config")
}
hooksPath := repo_config_core.OptionAll("hooksPath")
if len(hooksPath) != 1 || hooksPath[0] != config.Hooks.Execs {
return errors.New("Repository has hooksPath set to an unexpected value")
}
if !direct_access {
switch contrib_requirements {
case "closed":
if !direct_access {
return errors.New("You need direct access to push to this repo.")
}
case "registered_user":
if user_type != "registered" {
return errors.New("You need to be a registered user to push to this repo.")
}
case "ssh_pubkey":
if pubkey == "" {
return errors.New("You need to have an SSH public key to push to this repo.")
}
if user_type == "" {
user_id, err = add_user_ssh(session.Context(), pubkey)
if err != nil {
return err
}
fmt.Fprintln(session.Stderr(), "You are now registered as user ID", user_id)
}
case "public":
default:
panic("unknown contrib_requirements value " + contrib_requirements)
}
}
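// Register the session under a random cookie so the pre-receive hook can
// identify itself to the hooks daemon over the unix socket.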
cookie, err := random_urlsafe_string(16)
if err != nil {
fmt.Fprintln(session.Stderr(), "Error while generating cookie:", err)
}
pack_to_hook_by_cookie.Store(cookie, pack_to_hook_t{
session: session,
pubkey: pubkey,
direct_access: direct_access,
repo_path: repo_path,
user_id: user_id,
repo_id: repo_id,
group_name: group_name,
repo_name: repo_name,
repo: repo,
})
defer pack_to_hook_by_cookie.Delete(cookie)
// The Delete won't execute until proc.Wait returns, unless something
// goes horribly wrong, such as a panic.
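// git-receive-pack is run with the hooks socket path and the cookie in its
// environment so the hook executables can call back into this daemon.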
proc := exec.CommandContext(session.Context(), "git-receive-pack", repo_path)
proc.Env = append(os.Environ(),
"LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket,
"LINDENII_FORGE_HOOKS_COOKIE="+cookie,
)
proc.Stdin = session
proc.Stdout = session
proc.Stderr = session.Stderr()
err = proc.Start()
if err != nil {
fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
return err
}
err = proc.Wait()
if exitError, ok := err.(*exec.ExitError); ok {
fmt.Fprintln(session.Stderr(), "Process exited with error", exitError.ExitCode())
} else if err != nil {
fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
}
return err
}
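The check above requires core.hooksPath to contain exactly one value equal to config.Hooks.Execs. Purely as an illustration of satisfying that requirement with go-git (this helper is not part of the commit; set_hooks_path and the hooks_execs parameter are made-up names standing in for config.Hooks.Execs):

package main

import (
	"github.com/go-git/go-git/v5"
)

// set_hooks_path points a repository's core.hooksPath at the forge's hook
// executables so the hooksPath check in ssh_handle_receive_pack passes.
// Hypothetical helper; not part of this commit.
func set_hooks_path(repo_path string, hooks_execs string) error {
	repo, err := git.PlainOpen(repo_path)
	if err != nil {
		return err
	}
	cfg, err := repo.Config()
	if err != nil {
		return err
	}
	// Set core.hooksPath on the raw config and write it back.
	cfg.Raw.Section("core").SetOption("hooksPath", hooks_execs)
	return repo.SetConfig(cfg)
}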