Lindenii Project Forge
| Commit info | |
|---|---|
| ID | 80492711b4588c10dffa93a57fd9926dc337bbae |
| Author | Runxi Yu <me@runxiyu.org> |
| Author date | Wed, 05 Mar 2025 10:05:04 +0800 |
| Committer | Runxi Yu <me@runxiyu.org> |
| Committer date | Wed, 05 Mar 2025 10:05:04 +0800 |
| Actions | Get patch |
*: Typing fixes
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "errors"
    "io"
    "io/fs"
    "os"
    "path/filepath"
)

// deploy_hooks_to_filesystem deploys the git hooks client to the filesystem.
// The git hooks client is expected to be embedded in resources_fs and must be
// pre-compiled during the build process; see the Makefile.
func deploy_hooks_to_filesystem() (err error) {
    err = func() (err error) {
        var src_fd fs.File
        var dst_fd *os.File
        if src_fd, err = resources_fs.Open("git_hooks_client/git_hooks_client"); err != nil {
            return err
        }
        defer src_fd.Close()
        if dst_fd, err = os.OpenFile(filepath.Join(config.Hooks.Execs, "git_hooks_client"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755); err != nil {
            return err
        }
        defer dst_fd.Close()
        if _, err = io.Copy(dst_fd, src_fd); err != nil {
            return err
        }
        return nil
    }()
    if err != nil {
        return err
    }

    // Go's embed filesystems do not store permissions; but in any case,
    // they would need to be 0o755:
    if err = os.Chmod(filepath.Join(config.Hooks.Execs, "git_hooks_client"), 0o755); err != nil {
        return err
    }

    for _, hook_name := range []string{
        "pre-receive",
    } {
        if err = os.Symlink(filepath.Join(config.Hooks.Execs, "git_hooks_client"), filepath.Join(config.Hooks.Execs, hook_name)); err != nil {
            return err
        }
    }

    return nil
}
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "bytes"
    "context"
    "encoding/binary"
    "errors"
    "fmt"
    "io"
    "net"
    "os"
    "path/filepath"
    "strconv"
    "strings"
    "syscall"

    "github.com/go-git/go-git/v5/plumbing"
    "github.com/go-git/go-git/v5/plumbing/object"
    "github.com/jackc/pgx/v5"
    "go.lindenii.runxiyu.org/lindenii-common/ansiec"
)

var (
    err_get_fd    = errors.New("unable to get file descriptor")
    err_get_ucred = errors.New("failed getsockopt")
)

// hooks_handle_connection handles a connection from git_hooks_client via the
// unix socket.
func hooks_handle_connection(conn net.Conn) {
    var ctx context.Context
    var cancel context.CancelFunc
    var ucred *syscall.Ucred
    var err error
    var cookie []byte
    var pack_to_hook pack_to_hook_t
    var ssh_stderr io.Writer
    var ok bool
    var hook_return_value byte

    defer conn.Close()

    ctx, cancel = context.WithCancel(context.Background())
    defer cancel()

    // There aren't reasonable cases where someone would run this as
    // another user.
    if ucred, err = get_ucred(conn); err != nil {
        if _, err = conn.Write([]byte{1}); err != nil {
            return
        }
        wf_error(conn, "\nUnable to get peer credentials: %v", err)
        return
    }
    if ucred.Uid != uint32(os.Getuid()) {
        if _, err = conn.Write([]byte{1}); err != nil {
            return
        }
        wf_error(conn, "\nUID mismatch")
        return
    }

    cookie = make([]byte, 64)
    if _, err = conn.Read(cookie); err != nil {
        if _, err = conn.Write([]byte{1}); err != nil {
            return
        }
        wf_error(conn, "\nFailed to read cookie: %v", err)
        return
    }

    pack_to_hook, ok = pack_to_hook_by_cookie.Load(string(cookie))
    if !ok {
        if _, err = conn.Write([]byte{1}); err != nil {
            return
        }
        wf_error(conn, "\nInvalid handler cookie")
        return
    }

    ssh_stderr = pack_to_hook.session.Stderr()
-   ssh_stderr.Write([]byte{'\n'})
+   _, _ = ssh_stderr.Write([]byte{'\n'})
    hook_return_value = func() byte {
        var argc64 uint64
        if err = binary.Read(conn, binary.NativeEndian, &argc64); err != nil {
            wf_error(ssh_stderr, "Failed to read argc: %v", err)
            return 1
        }
        var args []string
        for i := uint64(0); i < argc64; i++ {
            var arg bytes.Buffer
            for {
                b := make([]byte, 1)
                n, err := conn.Read(b)
                if err != nil || n != 1 {
                    wf_error(ssh_stderr, "Failed to read arg: %v", err)
                    return 1
                }
                if b[0] == 0 {
                    break
                }
                arg.WriteByte(b[0])
            }
            args = append(args, arg.String())
        }

        var stdin bytes.Buffer
        if _, err = io.Copy(&stdin, conn); err != nil {
            wf_error(conn, "Failed to read to the stdin buffer: %v", err)
        }

        switch filepath.Base(args[0]) {
        case "pre-receive":
            if pack_to_hook.direct_access {
                return 0
            } else {
                all_ok := true
                for {
                    var line, old_oid, rest, new_oid, ref_name string
                    var found bool
                    var old_hash, new_hash plumbing.Hash
                    var old_commit, new_commit *object.Commit

                    line, err = stdin.ReadString('\n')
                    if errors.Is(err, io.EOF) {
                        break
                    } else if err != nil {
                        wf_error(ssh_stderr, "Failed to read pre-receive line: %v", err)
                        return 1
                    }
                    line = line[:len(line)-1]

                    old_oid, rest, found = strings.Cut(line, " ")
                    if !found {
                        wf_error(ssh_stderr, "Invalid pre-receive line: %v", line)
                        return 1
                    }

                    new_oid, ref_name, found = strings.Cut(rest, " ")
                    if !found {
                        wf_error(ssh_stderr, "Invalid pre-receive line: %v", line)
                        return 1
                    }

                    if strings.HasPrefix(ref_name, "refs/heads/contrib/") {
                        if all_zero_num_string(old_oid) {
                            // New branch
                            fmt.Fprintln(ssh_stderr, ansiec.Blue+"POK"+ansiec.Reset, ref_name)
                            var new_mr_id int
                            err = database.QueryRow(ctx,
                                "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
                                pack_to_hook.repo_id, pack_to_hook.user_id, strings.TrimPrefix(ref_name, "refs/heads/"),
                            ).Scan(&new_mr_id)
                            if err != nil {
                                wf_error(ssh_stderr, "Error creating merge request: %v", err)
                                return 1
                            }
                            fmt.Fprintln(ssh_stderr, ansiec.Blue+"Created merge request at", generate_http_remote_url(pack_to_hook.group_name, pack_to_hook.repo_name)+"/contrib/"+strconv.FormatUint(uint64(new_mr_id), 10)+"/"+ansiec.Reset)
                        } else {
                            // Existing contrib branch
                            var existing_merge_request_user_id int
                            var is_ancestor bool

                            err = database.QueryRow(ctx,
                                "SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
                                strings.TrimPrefix(ref_name, "refs/heads/"), pack_to_hook.repo_id,
                            ).Scan(&existing_merge_request_user_id)
                            if err != nil {
                                if errors.Is(err, pgx.ErrNoRows) {
                                    wf_error(ssh_stderr, "No existing merge request for existing contrib branch: %v", err)
                                } else {
                                    wf_error(ssh_stderr, "Error querying for existing merge request: %v", err)
                                }
                                return 1
                            }
                            if existing_merge_request_user_id == 0 {
                                all_ok = false
                                fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(branch belongs to unowned MR)")
                                continue
                            }
                            if existing_merge_request_user_id != pack_to_hook.user_id {
                                all_ok = false
                                fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(branch belongs to another user's MR)")
                                continue
                            }

                            old_hash = plumbing.NewHash(old_oid)
                            if old_commit, err = pack_to_hook.repo.CommitObject(old_hash); err != nil {
                                wf_error(ssh_stderr, "Daemon failed to get old commit: %v", err)
                                return 1
                            }

                            // Potential BUG: I'm not sure if new_commit is guaranteed to be
                            // detectable as they haven't been merged into the main repo's
                            // objects yet. But it seems to work, and I don't think there's
                            // any reason for this to only work intermittently.
                            new_hash = plumbing.NewHash(new_oid)
                            if new_commit, err = pack_to_hook.repo.CommitObject(new_hash); err != nil {
                                wf_error(ssh_stderr, "Daemon failed to get new commit: %v", err)
                                return 1
                            }

                            if is_ancestor, err = old_commit.IsAncestor(new_commit); err != nil {
                                wf_error(ssh_stderr, "Daemon failed to check if old commit is ancestor: %v", err)
                                return 1
                            }

                            if !is_ancestor {
                                // TODO: Create MR snapshot ref instead
                                all_ok = false
                                fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(force pushes are not supported yet)")
                                continue
                            }

                            fmt.Fprintln(ssh_stderr, ansiec.Blue+"POK"+ansiec.Reset, ref_name)
                        }
                    } else {
                        // Non-contrib branch
                        all_ok = false
                        fmt.Fprintln(ssh_stderr, ansiec.Red+"NAK"+ansiec.Reset, ref_name, "(you cannot push to branches outside of contrib/*)")
                    }
                }

                fmt.Fprintln(ssh_stderr)
                if all_ok {
                    fmt.Fprintln(ssh_stderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
                    return 0
                } else {
                    fmt.Fprintln(ssh_stderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
                    return 1
                }
            }
        default:
            fmt.Fprintln(ssh_stderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
            return 1
        }
    }()

    fmt.Fprintln(ssh_stderr)

    _, _ = conn.Write([]byte{hook_return_value})
}

func serve_git_hooks(listener net.Listener) error {
    for {
        conn, err := listener.Accept()
        if err != nil {
            return err
        }
        go hooks_handle_connection(conn)
    }
}

func get_ucred(conn net.Conn) (ucred *syscall.Ucred, err error) {
    var unix_conn *net.UnixConn = conn.(*net.UnixConn)
    var fd *os.File

    if fd, err = unix_conn.File(); err != nil {
        return nil, err_get_fd
    }
    defer fd.Close()

    if ucred, err = syscall.GetsockoptUcred(int(fd.Fd()), syscall.SOL_SOCKET, syscall.SO_PEERCRED); err != nil {
        return nil, err_get_ucred
    }
    return ucred, nil
}

func all_zero_num_string(s string) bool {
    for _, r := range s {
        if r != '0' {
            return false
        }
    }
    return true
}
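The daemon above expects a fixed handshake from git_hooks_client over the unix socket: a 64-byte cookie, a native-endian uint64 argument count, each argument as a NUL-terminated string, the hook's stdin until EOF, and finally a single status byte in reply. A rough sketch of the client side of that exchange, mirroring hooks_handle_connection; the function name, socket handling, and error handling are assumptions (the real client is a pre-compiled C program):

```go
package main

import (
	"encoding/binary"
	"io"
	"net"
)

// run_hook_client is a hypothetical sketch of the client half of the
// handshake that hooks_handle_connection expects.
func run_hook_client(socket_path string, cookie [64]byte, args []string, stdin io.Reader) (byte, error) {
	conn, err := net.Dial("unix", socket_path)
	if err != nil {
		return 1, err
	}
	defer conn.Close()

	// 64-byte cookie identifying the pack-to-hook session.
	if _, err = conn.Write(cookie[:]); err != nil {
		return 1, err
	}
	// Native-endian argc, then each argument as a NUL-terminated string.
	if err = binary.Write(conn, binary.NativeEndian, uint64(len(args))); err != nil {
		return 1, err
	}
	for _, arg := range args {
		if _, err = conn.Write(append([]byte(arg), 0)); err != nil {
			return 1, err
		}
	}
	// Forward the hook's stdin, then half-close so the daemon's io.Copy sees EOF.
	if _, err = io.Copy(conn, stdin); err != nil {
		return 1, err
	}
	if unix_conn, ok := conn.(*net.UnixConn); ok {
		_ = unix_conn.CloseWrite()
	}
	// The daemon replies with a single status byte; 0 means the push is accepted.
	status := make([]byte, 1)
	if _, err = io.ReadFull(conn, status); err != nil {
		return 1, err
	}
	return status[0], nil
}
```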
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "fmt"
    "net/http"
    "strings"

    "github.com/go-git/go-git/v5"
    "github.com/go-git/go-git/v5/plumbing"
    "github.com/go-git/go-git/v5/plumbing/filemode"
    "github.com/go-git/go-git/v5/plumbing/format/diff"
    "github.com/go-git/go-git/v5/plumbing/object"
    "go.lindenii.runxiyu.org/lindenii-common/misc"
)

// The file patch type from go-git isn't really usable in HTML templates
// either.
type usable_file_patch_t struct {
    From   diff.File
    To     diff.File
    Chunks []usable_chunk
}

type usable_chunk struct {
    Operation diff.Operation
    Content   string
}

func handle_repo_commit(w http.ResponseWriter, r *http.Request, params map[string]any) {
    var repo *git.Repository
    var commit_id_specified_string, commit_id_specified_string_without_suffix string
    var commit_id plumbing.Hash
    var parent_commit_hash plumbing.Hash
    var commit_object *object.Commit
    var commit_id_string string
    var err error
    var patch *object.Patch

    repo, commit_id_specified_string = params["repo"].(*git.Repository), params["commit_id"].(string)

    commit_id_specified_string_without_suffix = strings.TrimSuffix(commit_id_specified_string, ".patch")
    commit_id = plumbing.NewHash(commit_id_specified_string_without_suffix)
    if commit_object, err = repo.CommitObject(commit_id); err != nil {
        http.Error(w, "Error getting commit object: "+err.Error(), http.StatusInternalServerError)
        return
    }
    if commit_id_specified_string_without_suffix != commit_id_specified_string {
        var formatted_patch string
        if formatted_patch, err = format_patch_from_commit(commit_object); err != nil {
            http.Error(w, "Error formatting patch: "+err.Error(), http.StatusInternalServerError)
            return
        }
        fmt.Fprintln(w, formatted_patch)
        return
    }
    commit_id_string = commit_object.Hash.String()
    if commit_id_string != commit_id_specified_string {
        http.Redirect(w, r, commit_id_string, http.StatusSeeOther)
        return
    }

    params["commit_object"] = commit_object
    params["commit_id"] = commit_id_string

    parent_commit_hash, patch, err = get_patch_from_commit(commit_object)
    if err != nil {
        http.Error(w, "Error getting patch from commit: "+err.Error(), http.StatusInternalServerError)
        return
    }
    params["parent_commit_hash"] = parent_commit_hash.String()
    params["patch"] = patch

    params["file_patches"] = make_usable_file_patches(patch)

    render_template(w, "repo_commit", params)
}

type fake_diff_file struct {
    hash plumbing.Hash
    mode filemode.FileMode
    path string
}

func (f fake_diff_file) Hash() plumbing.Hash {
    return f.hash
}

func (f fake_diff_file) Mode() filemode.FileMode {
    return f.mode
}

func (f fake_diff_file) Path() string {
    return f.path
}

var fake_diff_file_null = fake_diff_file{
    hash: plumbing.NewHash("0000000000000000000000000000000000000000"),
    mode: misc.First_or_panic(filemode.New("100644")),
    path: "",
}
-func make_usable_file_patches(patch diff.Patch) (usable_file_patches []usable_file_patch) {
+func make_usable_file_patches(patch diff.Patch) (usable_file_patches []usable_file_patch_t) {
    // TODO: Remove unnecessary context
    // TODO: Prepend "+"/"-"/" " instead of solely distinguishing based on color
    for _, file_patch := range patch.FilePatches() {
        var from, to diff.File
        var usable_file_patch usable_file_patch_t
        chunks := []usable_chunk{}

        from, to = file_patch.Files()
        if from == nil {
            from = fake_diff_file_null
        }
        if to == nil {
            to = fake_diff_file_null
        }
        for _, chunk := range file_patch.Chunks() {
            var content string

            content = chunk.Content()
            if len(content) > 0 && content[0] == '\n' {
                content = "\n" + content
            } // Horrible hack to fix how browsers handle newlines that immediately follow <pre>

            chunks = append(chunks, usable_chunk{
                Operation: chunk.Type(),
                Content:   content,
            })
        }
        usable_file_patch = usable_file_patch_t{
            Chunks: chunks,
            From:   from,
            To:     to,
        }
        usable_file_patches = append(usable_file_patches, usable_file_patch)
    }
    return
}
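The second TODO above could be addressed with a small helper that maps go-git chunk operations to the conventional unified-diff line prefixes, so templates would not have to rely on color alone. A hypothetical sketch (chunk_prefix is not part of the existing code):

```go
package main

import "github.com/go-git/go-git/v5/plumbing/format/diff"

// chunk_prefix maps a go-git chunk operation to the usual unified-diff
// line prefix; each line of a chunk's Content would be prefixed with it.
func chunk_prefix(op diff.Operation) string {
	switch op {
	case diff.Add:
		return "+"
	case diff.Delete:
		return "-"
	default: // diff.Equal
		return " "
	}
}
```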
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "net/http"
    "strconv"

    "github.com/go-git/go-git/v5"
    "github.com/go-git/go-git/v5/plumbing"
    "github.com/go-git/go-git/v5/plumbing/object"
)

func handle_repo_contrib_one(w http.ResponseWriter, r *http.Request, params map[string]any) {
    var mr_id_string string
    var mr_id int
    var err error
    var title, status, source_ref, destination_branch string
    var repo *git.Repository
    var source_ref_hash plumbing.Hash
    var source_commit *object.Commit

    mr_id_string = params["mr_id"].(string)
    mr_id_int64, err := strconv.ParseInt(mr_id_string, 10, strconv.IntSize)
    if err != nil {
        http.Error(w, "Merge request ID not an integer: "+err.Error(), http.StatusBadRequest)
        return
    }
    mr_id = int(mr_id_int64)

    if err = database.QueryRow(r.Context(),
        "SELECT COALESCE(title, ''), status, source_ref, COALESCE(destination_branch, '') FROM merge_requests WHERE id = $1",
        mr_id,
    ).Scan(&title, &status, &source_ref, &destination_branch); err != nil {
        http.Error(w, "Error querying merge request: "+err.Error(), http.StatusInternalServerError)
        return
    }

    repo = params["repo"].(*git.Repository)

    if source_ref_hash, err = get_ref_hash_from_type_and_name(repo, "branch", source_ref); err != nil {
        http.Error(w, "Error getting source ref hash: "+err.Error(), http.StatusInternalServerError)
        return
    }
    if source_commit, err = repo.CommitObject(source_ref_hash); err != nil {
        http.Error(w, "Error getting source commit: "+err.Error(), http.StatusInternalServerError)
        return
    }
    params["source_commit"] = source_commit

    var destination_branch_hash plumbing.Hash
    if destination_branch == "" {
        destination_branch = "HEAD"
        destination_branch_hash, err = get_ref_hash_from_type_and_name(repo, "", "")
    } else {
        destination_branch_hash, err = get_ref_hash_from_type_and_name(repo, "branch", destination_branch)
-       if err != nil {
-           http.Error(w, "Error getting destination branch hash: "+err.Error(), http.StatusInternalServerError)
-           return
-       }
+   }
+   if err != nil {
+       http.Error(w, "Error getting destination branch hash: "+err.Error(), http.StatusInternalServerError)
+       return
    }
    destination_commit, err := repo.CommitObject(destination_branch_hash)
    if err != nil {
        http.Error(w, "Error getting destination commit: "+err.Error(), http.StatusInternalServerError)
        return
    }
    params["destination_commit"] = destination_commit

    patch, err := destination_commit.Patch(source_commit)
    if err != nil {
        http.Error(w, "Error getting patch: "+err.Error(), http.StatusInternalServerError)
        return
    }
    params["file_patches"] = make_usable_file_patches(patch)

    params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, source_ref, destination_branch

    render_template(w, "repo_contrib_one", params)
}
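For reference, the diff shown on the merge-request page comes from go-git's Commit.Patch, computed from the destination commit to the source commit. A standalone sketch of that call; the repository path and commit IDs below are placeholders, not values from this codebase:

```go
package main

import (
	"fmt"

	"github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
)

func main() {
	// Hypothetical repository path and commit IDs, for illustration only.
	repo, err := git.PlainOpen("/path/to/repo.git")
	if err != nil {
		panic(err)
	}
	destination, err := repo.CommitObject(plumbing.NewHash("1111111111111111111111111111111111111111"))
	if err != nil {
		panic(err)
	}
	source, err := repo.CommitObject(plumbing.NewHash("2222222222222222222222222222222222222222"))
	if err != nil {
		panic(err)
	}
	// Same call the handler uses: the patch goes from destination to source.
	patch, err := destination.Patch(source)
	if err != nil {
		panic(err)
	}
	fmt.Println(patch.String())
}
```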
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "errors"
    "fmt"
    "net/http"
    "strconv"
    "strings"

    "github.com/jackc/pgx/v5"
    "go.lindenii.runxiyu.org/lindenii-common/clog"
)

type http_router_t struct{}

func (router *http_router_t) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    clog.Info("Incoming HTTP: " + r.RemoteAddr + " " + r.Method + " " + r.RequestURI)

    var segments []string
    var err error
    var non_empty_last_segments_len int
-   var params map[string]any
    var separator_index int
+   params := make(map[string]any)

    if segments, _, err = parse_request_uri(r.RequestURI); err != nil {
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }

    non_empty_last_segments_len = len(segments)
    if segments[len(segments)-1] == "" {
        non_empty_last_segments_len--
    }

    if segments[0] == ":" {
        if len(segments) < 2 {
            http.Error(w, "Blank system endpoint", http.StatusNotFound)
            return
        } else if len(segments) == 2 && redirect_with_slash(w, r) {
            return
        }

        switch segments[1] {
        case "static":
            static_handler.ServeHTTP(w, r)
            return
        case "source":
            source_handler.ServeHTTP(w, r)
            return
        }
    }

    params["url_segments"] = segments
    params["global"] = global_data
    var _user_id int // 0 for none
    _user_id, params["username"], err = get_user_info_from_request(r)
    if errors.Is(err, http.ErrNoCookie) {
    } else if errors.Is(err, pgx.ErrNoRows) {
    } else if err != nil {
        http.Error(w, "Error getting user info from request: "+err.Error(), http.StatusInternalServerError)
        return
    }
    if _user_id == 0 {
        params["user_id"] = ""
    } else {
        params["user_id"] = strconv.Itoa(_user_id)
    }

    if segments[0] == ":" {
        switch segments[1] {
        case "login":
            handle_login(w, r, params)
            return
        case "users":
            handle_users(w, r, params)
            return
        default:
            http.Error(w, fmt.Sprintf("Unknown system module type: %s", segments[1]), http.StatusNotFound)
            return
        }
    }

    separator_index = -1
    for i, part := range segments {
        if part == ":" {
            separator_index = i
            break
        }
    }
    params["separator_index"] = separator_index

    // TODO
    if separator_index > 1 {
        http.Error(w, "Subgroups haven't been implemented yet", http.StatusNotImplemented)
        return
    }

    var module_type string
    var module_name string
    var group_name string

    switch {
    case non_empty_last_segments_len == 0:
        handle_index(w, r, params)
    case separator_index == -1:
        http.Error(w, "Group indexing hasn't been implemented yet", http.StatusNotImplemented)
    case non_empty_last_segments_len == separator_index+1:
        http.Error(w, "Group root hasn't been implemented yet", http.StatusNotImplemented)
    case non_empty_last_segments_len == separator_index+2:
        if redirect_with_slash(w, r) {
            return
        }
        module_type = segments[separator_index+1]
        params["group_name"] = segments[0]
        switch module_type {
        case "repos":
            handle_group_repos(w, r, params)
        default:
            http.Error(w, fmt.Sprintf("Unknown module type: %s", module_type), http.StatusNotFound)
        }
    default:
        module_type = segments[separator_index+1]
        module_name = segments[separator_index+2]
        group_name = segments[0]
        params["group_name"] = group_name
        switch module_type {
        case "repos":
            params["repo_name"] = module_name

            if non_empty_last_segments_len > separator_index+3 {
                switch segments[separator_index+3] {
                case "info":
                    if err = handle_repo_info(w, r, params); err != nil {
                        http.Error(w, err.Error(), http.StatusInternalServerError)
                    }
                    return
                case "git-upload-pack":
                    if err = handle_upload_pack(w, r, params); err != nil {
                        http.Error(w, err.Error(), http.StatusInternalServerError)
                    }
                    return
                }
            }

            if params["ref_type"], params["ref_name"], err = get_param_ref_and_type(r); err != nil {
                if errors.Is(err, err_no_ref_spec) {
                    params["ref_type"] = ""
                } else {
                    http.Error(w, "Error querying ref type: "+err.Error(), http.StatusInternalServerError)
                    return
                }
            }

            // TODO: subgroups
            if params["repo"], params["repo_description"], params["repo_id"], err = open_git_repo(r.Context(), group_name, module_name); err != nil {
                http.Error(w, "Error opening repo: "+err.Error(), http.StatusInternalServerError)
                return
            }

            if non_empty_last_segments_len == separator_index+3 {
                if redirect_with_slash(w, r) {
                    return
                }
                handle_repo_index(w, r, params)
                return
            }

            repo_feature := segments[separator_index+3]
            switch repo_feature {
            case "tree":
                params["rest"] = strings.Join(segments[separator_index+4:], "/")
                if len(segments) < separator_index+5 && redirect_with_slash(w, r) {
                    return
                }
                handle_repo_tree(w, r, params)
            case "raw":
                params["rest"] = strings.Join(segments[separator_index+4:], "/")
                if len(segments) < separator_index+5 && redirect_with_slash(w, r) {
                    return
                }
                handle_repo_raw(w, r, params)
            case "log":
                if non_empty_last_segments_len > separator_index+4 {
                    http.Error(w, "Too many parameters", http.StatusBadRequest)
                    return
                }
                if redirect_with_slash(w, r) {
                    return
                }
                handle_repo_log(w, r, params)
            case "commit":
                if redirect_without_slash(w, r) {
                    return
                }
                params["commit_id"] = segments[separator_index+4]
                handle_repo_commit(w, r, params)
            case "contrib":
                if redirect_with_slash(w, r) {
                    return
                }
                switch non_empty_last_segments_len {
                case separator_index + 4:
                    handle_repo_contrib_index(w, r, params)
                case separator_index + 5:
                    params["mr_id"] = segments[separator_index+4]
                    handle_repo_contrib_one(w, r, params)
                default:
                    http.Error(w, "Too many parameters", http.StatusBadRequest)
                }
            default:
                http.Error(w, fmt.Sprintf("Unknown repo feature: %s", repo_feature), http.StatusNotFound)
            }
        default:
            http.Error(w, fmt.Sprintf("Unknown module type: %s", module_type), http.StatusNotFound)
        }
    }
}
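The router treats a literal ":" segment as the separator between the group path and the module that follows it, so a request such as /lindenii/:/repos/forge/commit/abc123 resolves to group "lindenii", module type "repos", repo "forge", and the "commit" feature. A toy sketch of that segment scan; parse_request_uri is defined elsewhere, so the naive splitting shown here is an assumption:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical segment split; the real router uses parse_request_uri.
	segments := strings.Split(strings.TrimPrefix("/lindenii/:/repos/forge/commit/abc123", "/"), "/")

	// Find the ":" separator, as ServeHTTP does.
	separator_index := -1
	for i, part := range segments {
		if part == ":" {
			separator_index = i
			break
		}
	}

	group_name := segments[0]
	module_type := segments[separator_index+1]
	module_name := segments[separator_index+2]
	repo_feature := segments[separator_index+3]
	fmt.Println(group_name, module_type, module_name, repo_feature)
	// Output: lindenii repos forge commit
}
```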
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "embed"
    "html/template"
    "io/fs"
    "net/http"
)

// We embed all source for easy AGPL compliance.
//
//go:embed .gitignore .gitattributes
//go:embed LICENSE README.md
//go:embed *.go go.mod go.sum
//go:embed *.scfg
//go:embed Makefile
//go:embed schema.sql
//go:embed static/* templates/*
//go:embed git_hooks_client/*.c
//go:embed vendor/*
var source_fs embed.FS

var source_handler = http.StripPrefix(
    "/:/source/",
    http.FileServer(http.FS(source_fs)),
)

//go:embed templates/* static/* git_hooks_client/git_hooks_client
var resources_fs embed.FS

var templates *template.Template

func load_templates() (err error) {
    templates, err = template.New("templates").Funcs(template.FuncMap{
        "first_line": first_line,
        "base_name":  base_name,
    }).ParseFS(resources_fs, "templates/*")
    return err
}

var static_handler http.Handler

func init() {
-   if static_fs, err := fs.Sub(resources_fs, "static"); err != nil {
+   static_fs, err := fs.Sub(resources_fs, "static")
+   if err != nil {
        panic(err)
    }
    static_handler = http.StripPrefix("/:/static/", http.FileServer(http.FS(static_fs)))
}
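Handlers throughout the router call render_template(w, "name", params). That helper is defined elsewhere in the codebase, but given the template set parsed above it plausibly wraps ExecuteTemplate along these lines; the name render_template_sketch and the error handling are assumptions, not the actual implementation:

```go
// render_template_sketch is a hypothetical stand-in for render_template,
// executing a named template from the set parsed in load_templates.
func render_template_sketch(w http.ResponseWriter, template_name string, params map[string]any) {
	if err := templates.ExecuteTemplate(w, template_name, params); err != nil {
		http.Error(w, "Error rendering template: "+err.Error(), http.StatusInternalServerError)
	}
}
```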
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileContributor: Runxi Yu <https://runxiyu.org>

package main

import (
    "context"

    "github.com/jackc/pgx/v5"
)

func add_user_ssh(ctx context.Context, pubkey string) (user_id int, err error) {
    var tx pgx.Tx

    if tx, err = database.Begin(ctx); err != nil {
        return
    }
-   defer tx.Rollback(ctx)
+   defer func() { _ = tx.Rollback(ctx) }()

    if err = tx.QueryRow(ctx, `INSERT INTO users (type) VALUES ('pubkey_only') RETURNING id`).Scan(&user_id); err != nil {
        return
    }

    if _, err = tx.Exec(ctx, `INSERT INTO ssh_public_keys (key_string, user_id) VALUES ($1, $2)`, pubkey, user_id); err != nil {
        return
    }

    err = tx.Commit(ctx)
    return
}
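The deferred Rollback above runs even after a successful Commit; in pgx v5 a Rollback on an already-committed transaction returns pgx.ErrTxClosed rather than touching the database, which is why its error is deliberately discarded. A minimal sketch of the same begin/defer-rollback/commit pattern; the function, table, and column names are placeholders, not part of this codebase:

```go
// insert_example is a hypothetical illustration of the transaction pattern
// used in add_user_ssh.
func insert_example(ctx context.Context, value string) (err error) {
	var tx pgx.Tx
	if tx, err = database.Begin(ctx); err != nil {
		return
	}
	// Safe to defer unconditionally: after a successful Commit this Rollback
	// returns pgx.ErrTxClosed, which is intentionally ignored.
	defer func() { _ = tx.Rollback(ctx) }()

	if _, err = tx.Exec(ctx, `INSERT INTO example_table (example_column) VALUES ($1)`, value); err != nil {
		return
	}
	return tx.Commit(ctx)
}
```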