Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/cleanup                                             |  46
-rw-r--r--  scripts/git-integration/0001-Patch-sshd-for-the-AUR.patch   | 152
-rw-r--r--  scripts/git-integration/aurinfo.py                          | 204
-rwxr-xr-x  scripts/git-integration/git-auth.py                         |  42
-rwxr-xr-x  scripts/git-integration/git-serve.py                        | 157
-rwxr-xr-x  scripts/git-integration/git-update.py                       | 246
-rwxr-xr-x  scripts/git-integration/init-repos.py                       |  51
-rw-r--r--  scripts/git-integration/sshd_config                         |   6
-rwxr-xr-x  scripts/uploadbuckets.sh                                    |  58
9 files changed, 858 insertions, 104 deletions
diff --git a/scripts/cleanup b/scripts/cleanup
deleted file mode 100755
index 0ccbe7df..00000000
--- a/scripts/cleanup
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/php
-<?php
-# Run this script by providing it with the top path of AUR.
-# In that path you should see a file lib/aur.inc
-#
-# This will remove files which belong to deleted packages
-# in unsupported.
-#
-# ex: php cleanup dev/aur/web
-#
-$dir = $argv[1];
-
-if (empty($dir)) {
-	echo "Please specify AUR directory.\n";
-	exit;
-}
-
-set_include_path(get_include_path() . PATH_SEPARATOR . "$dir/lib");
-include("confparser.inc.php");
-include("aur.inc.php");
-include("pkgfuncs.inc.php");
-
-$count = 0;
-
-$incoming_dir = config_get('paths', 'storage');
-$buckets = scandir($incoming_dir);
-foreach ($buckets as $bucket) {
-	$bucketpath = $incoming_dir . $bucket;
-	if ($bucket == '.' || $bucket == '..' || !is_dir($bucketpath)) {
-		continue;
-	}
-	$files = scandir($incoming_dir . $bucket);
-	foreach ($files as $pkgname) {
-		if ($pkgname == '.' || $pkgname == '..') {
-			continue;
-		}
-		$fullpath = $incoming_dir . $bucket . "/" . $pkgname;
-		if (!pkg_from_name($pkgname) && is_dir($fullpath)) {
-			echo 'Removing ' . $fullpath . "\n";
-			rm_tree($fullpath);
-			$count++;
-		}
-	}
-}
-
-echo "\nRemoved $count directories.\n";
diff --git a/scripts/git-integration/0001-Patch-sshd-for-the-AUR.patch b/scripts/git-integration/0001-Patch-sshd-for-the-AUR.patch
new file mode 100644
index 00000000..6b727123
--- /dev/null
+++ b/scripts/git-integration/0001-Patch-sshd-for-the-AUR.patch
@@ -0,0 +1,152 @@
+From e23745b61a46f034bca3cab9936c24c249afdc7f Mon Sep 17 00:00:00 2001
+From: Lukas Fleischer <archlinux@cryptocrack.de>
+Date: Sun, 21 Dec 2014 22:17:48 +0100
+Subject: [PATCH] Patch sshd for the AUR
+
+* Add SSH_KEY_FINGERPRINT and SSH_KEY variables to the environment of
+  the AuthorizedKeysCommand which allows for efficiently looking up SSH
+  keys in the AUR database.
+
+* Remove the secure path check for the AuthorizedKeysCommand. We are
+  running the sshd under a non-privileged user who has as little
+  permissions as possible. In particular, he does not own the directory
+  that contains the scripts for the Git backend.
+
+* Prevent from running the sshd as root.
+
+Signed-off-by: Lukas Fleischer <archlinux@cryptocrack.de>
+---
+ auth2-pubkey.c | 48 +++++++++++++++++++++++++++++++++++++++++++-----
+ ssh.h          | 12 ++++++++++++
+ sshd.c         |  5 +++++
+ sshd_config.5  |  5 +++++
+ 4 files changed, 65 insertions(+), 5 deletions(-)
+
+diff --git a/auth2-pubkey.c b/auth2-pubkey.c
+index 0a3c1de..baf4922 100644
+--- a/auth2-pubkey.c
++++ b/auth2-pubkey.c
+@@ -510,6 +510,8 @@ user_key_command_allowed2(struct passwd *user_pw, Key *key)
+ 	int status, devnull, p[2], i;
+ 	pid_t pid;
+ 	char *username, errmsg[512];
++	struct sshbuf *b = NULL, *bb = NULL;
++	char *keytext, *uu = NULL;
+ 
+ 	if (options.authorized_keys_command == NULL ||
+ 	    options.authorized_keys_command[0] != '/')
+@@ -538,11 +540,6 @@ user_key_command_allowed2(struct passwd *user_pw, Key *key)
+ 		    options.authorized_keys_command, strerror(errno));
+ 		goto out;
+ 	}
+-	if (auth_secure_path(options.authorized_keys_command, &st, NULL, 0,
+-	    errmsg, sizeof(errmsg)) != 0) {
+-		error("Unsafe AuthorizedKeysCommand: %s", errmsg);
+-		goto out;
+-	}
+ 
+ 	if (pipe(p) != 0) {
+ 		error("%s: pipe: %s", __func__, strerror(errno));
+@@ -568,6 +565,47 @@ user_key_command_allowed2(struct passwd *user_pw, Key *key)
+ 		for (i = 0; i < NSIG; i++)
+ 			signal(i, SIG_DFL);
+ 
++		keytext = key_fingerprint(key, SSH_FP_MD5, SSH_FP_HEX);
++		if (setenv(SSH_KEY_FINGERPRINT_ENV_NAME, keytext, 1) == -1) {
++			error("%s: setenv: %s", __func__, strerror(errno));
++			_exit(1);
++		}
++
++		if (!(b = sshbuf_new()) || !(bb = sshbuf_new())) {
++			error("%s: sshbuf_new: %s", __func__, strerror(errno));
++			_exit(1);
++		}
++		if (sshkey_to_blob_buf(key, bb) != 0) {
++			error("%s: sshkey_to_blob_buf: %s", __func__,
++			    strerror(errno));
++			_exit(1);
++		}
++		if (!(uu = sshbuf_dtob64(bb))) {
++			error("%s: sshbuf_dtob64: %s", __func__,
++			    strerror(errno));
++			_exit(1);
++		}
++		if (sshbuf_putf(b, "%s ", sshkey_ssh_name(key))) {
++			error("%s: sshbuf_putf: %s", __func__,
++			    strerror(errno));
++			_exit(1);
++		}
++		if (sshbuf_put(b, uu, strlen(uu) + 1)) {
++			error("%s: sshbuf_put: %s", __func__,
++			    strerror(errno));
++			_exit(1);
++		}
++		if (setenv(SSH_KEY_ENV_NAME, sshbuf_ptr(b), 1) == -1) {
++			error("%s: setenv: %s", __func__, strerror(errno));
++			_exit(1);
++		}
++		if (uu)
++			free(uu);
++		if (b)
++			sshbuf_free(b);
++		if (bb)
++			sshbuf_free(bb);
++
+ 		if ((devnull = open(_PATH_DEVNULL, O_RDWR)) == -1) {
+ 			error("%s: open %s: %s", __func__, _PATH_DEVNULL,
+ 			    strerror(errno));
+diff --git a/ssh.h b/ssh.h
+index c94633b..411ea86 100644
+--- a/ssh.h
++++ b/ssh.h
+@@ -97,3 +97,15 @@
+ 
+ /* Listen backlog for sshd, ssh-agent and forwarding sockets */
+ #define SSH_LISTEN_BACKLOG	128
++
++/*
++ * Name of the environment variable containing the incoming key passed
++ * to AuthorizedKeysCommand.
++ */
++#define SSH_KEY_ENV_NAME	"SSH_KEY"
++
++/*
++ * Name of the environment variable containing the incoming key fingerprint
++ * passed to AuthorizedKeysCommand.
++ */
++#define SSH_KEY_FINGERPRINT_ENV_NAME	"SSH_KEY_FINGERPRINT"
+diff --git a/sshd.c b/sshd.c
+index 4e01855..60c676f 100644
+--- a/sshd.c
++++ b/sshd.c
+@@ -1424,6 +1424,11 @@ main(int ac, char **av)
+ 	av = saved_argv;
+ #endif
+ 
++	if (geteuid() == 0) {
++		fprintf(stderr, "this is a patched version of the sshd that must not be run as root.\n");
++		exit(1);
++	}
++
+ 	if (geteuid() == 0 && setgroups(0, NULL) == -1)
+ 		debug("setgroups(): %.200s", strerror(errno));
+ 
+diff --git a/sshd_config.5 b/sshd_config.5
+index ef36d33..1d7bade 100644
+--- a/sshd_config.5
++++ b/sshd_config.5
+@@ -223,6 +223,11 @@ It will be invoked with a single argument of the username
+ being authenticated, and should produce on standard output zero or
+ more lines of authorized_keys output (see AUTHORIZED_KEYS in
+ .Xr sshd 8 ) .
++The key being used for authentication (the key's type and the key text itself,
++separated by a space) will be available in the
++.Ev SSH_KEY
++environment variable, and the fingerprint of the key will be available in the
++.Ev SSH_KEY_FINGERPRINT environment variable.
+ If a key supplied by AuthorizedKeysCommand does not successfully authenticate
+ and authorize the user then public key authentication continues using the usual
+ .Cm AuthorizedKeysFile
+-- 
+2.2.1
+
diff --git a/scripts/git-integration/aurinfo.py b/scripts/git-integration/aurinfo.py
new file mode 100644
index 00000000..d9b93729
--- /dev/null
+++ b/scripts/git-integration/aurinfo.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+
+from copy import copy, deepcopy
+import pprint
+import sys
+
+class Attr(object):
+    def __init__(self, name, is_multivalued=False, allow_arch_extensions=False):
+        self.name = name
+        self.is_multivalued = is_multivalued
+        self.allow_arch_extensions = allow_arch_extensions
+
+PKGBUILD_ATTRIBUTES = {
+    'arch': Attr('arch', True),
+    'backup': Attr('backup', True),
+    'changelog': Attr('changelog', False),
+    'checkdepends': Attr('checkdepends', True),
+    'conflicts': Attr('conflicts', True, True),
+    'depends': Attr('depends', True, True),
+    'epoch': Attr('epoch', False),
+    'groups': Attr('groups', True),
+    'install': Attr('install', False),
+    'license': Attr('license', True),
+    'makedepends': Attr('makedepends', True, True),
+    'md5sums': Attr('md5sums', True, True),
+    'noextract': Attr('noextract', True),
+    'optdepends': Attr('optdepends', True, True),
+    'options': Attr('options', True),
+    'pkgname': Attr('pkgname', False),
+    'pkgrel': Attr('pkgrel', False),
+    'pkgver': Attr('pkgver', False),
+    'provides': Attr('provides', True, True),
+    'replaces': Attr('replaces', True, True),
+    'sha1sums': Attr('sha1sums', True, True),
+    'sha224sums': Attr('sha224sums', True, True),
+    'sha256sums': Attr('sha256sums', True, True),
+    'sha384sums': Attr('sha384sums', True, True),
+    'sha512sums': Attr('sha512sums', True, True),
+    'source': Attr('source', True, True),
+    'url': Attr('url', False),
+    'validpgpkeys': Attr('validpgpkeys', True),
+}
+
+def find_attr(attrname):
+    # exact match
+    attr = PKGBUILD_ATTRIBUTES.get(attrname, None)
+    if attr:
+        return attr
+
+    # prefix match
+    # XXX: this could break in the future if PKGBUILD(5) ever
+    # introduces a key which is a subset of another.
+    for k in PKGBUILD_ATTRIBUTES.keys():
+        if attrname.startswith(k + '_'):
+            return PKGBUILD_ATTRIBUTES[k]
+
+def IsMultiValued(attrname):
+    attr = find_attr(attrname)
+    return attr and attr.is_multivalued
+
+class AurInfo(object):
+    def __init__(self):
+        self._pkgbase = {}
+        self._packages = {}
+
+    def GetPackageNames(self):
+        return self._packages.keys()
+
+    def GetMergedPackage(self, pkgname):
+        package = deepcopy(self._pkgbase)
+        package['pkgname'] = pkgname
+        for k, v in self._packages.get(pkgname).items():
+            package[k] = deepcopy(v)
+        return package
+
+    def AddPackage(self, pkgname):
+        self._packages[pkgname] = {}
+        return self._packages[pkgname]
+
+    def SetPkgbase(self, pkgbasename):
+        self._pkgbase = {'pkgname' : pkgbasename}
+        return self._pkgbase
+
+
+class StderrECatcher(object):
+    def Catch(self, lineno, error):
+        print('ERROR[%d]: %s' % (lineno, error), file=sys.stderr)
+
+
+class CollectionECatcher(object):
+    def __init__(self):
+        self._errors = []
+
+    def Catch(self, lineno, error):
+        self._errors.append((lineno, error))
+
+    def HasErrors(self):
+        return len(self._errors) > 0
+
+    def Errors(self):
+        return copy(self._errors)
+
+
+def ParseAurinfoFromIterable(iterable, ecatcher=None):
+    aurinfo = AurInfo()
+
+    if ecatcher is None:
+        ecatcher = StderrECatcher()
+
+    current_package = None
+    lineno = 0
+
+    for line in iterable:
+        lineno += 1
+
+        if line.startswith('#'):
+            continue
+
+        if not line.strip():
+            # end of package
+            current_package = None
+            continue
+
+        if not line.startswith('\t'):
+            # start of new package
+            try:
+                key, value = map(str.strip, line.split('=', 1))
+            except ValueError:
+                ecatcher.Catch(lineno, 'unexpected header format in section=%s' %
+                               current_package['pkgname'])
+                continue
+
+            if key == 'pkgbase':
+                current_package = aurinfo.SetPkgbase(value)
+            else:
+                current_package = aurinfo.AddPackage(value)
+        else:
+            # package attribute
+            if current_package is None:
+                ecatcher.Catch(lineno, 'package attribute found outside of '
+                               'a package section')
+                continue
+
+            try:
+                key, value = map(str.strip, line.split('=', 1))
+            except ValueError:
+                ecatcher.Catch(lineno, 'unexpected attribute format in '
+                               'section=%s' % current_package['pkgname'])
+
+            if IsMultiValued(key):
+                if not current_package.get(key):
+                    current_package[key] = []
+                if value:
+                    current_package[key].append(value)
+            else:
+                if not current_package.get(key):
+                    current_package[key] = value
+                else:
+                    ecatcher.Catch(lineno, 'overwriting attribute '
+                                   '%s: %s -> %s' % (key, current_package[key],
+                                   value))
+
+    return aurinfo
+
+
+def ParseAurinfo(filename='.AURINFO', ecatcher=None):
+    with open(filename) as f:
+        return ParseAurinfoFromIterable(f, ecatcher)
+
+
+def ValidateAurinfo(filename='.AURINFO'):
+    ecatcher = CollectionECatcher()
+    ParseAurinfo(filename, ecatcher)
+    errors = ecatcher.Errors()
+    for error in errors:
+        print('error on line %d: %s' % error, file=sys.stderr)
+    return not errors
+
+
+if __name__ == '__main__':
+    pp = pprint.PrettyPrinter(indent=4)
+
+    if len(sys.argv) == 1:
+        print('error: not enough arguments')
+        sys.exit(1)
+    elif len(sys.argv) == 2:
+        action = sys.argv[1]
+        filename = '.AURINFO'
+    else:
+        action, filename = sys.argv[1:3]
+
+    if action == 'parse':
+        aurinfo = ParseAurinfo()
+        for pkgname in aurinfo.GetPackageNames():
+            print(">>> merged package: %s" % pkgname)
+            pp.pprint(aurinfo.GetMergedPackage(pkgname))
+            print()
+    elif action == 'validate':
+        sys.exit(not ValidateAurinfo(filename))
+    else:
+        print('unknown action: %s' % action)
+        sys.exit(1)
+
+# vim: set et ts=4 sw=4:
diff --git a/scripts/git-integration/git-auth.py b/scripts/git-integration/git-auth.py
new file mode 100755
index 00000000..801a1d36
--- /dev/null
+++ b/scripts/git-integration/git-auth.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python3
+
+import configparser
+import mysql.connector
+import os
+import re
+
+config = configparser.RawConfigParser()
+config.read(os.path.dirname(os.path.realpath(__file__)) + "/../../conf/config")
+
+aur_db_host = config.get('database', 'host')
+aur_db_name = config.get('database', 'name')
+aur_db_user = config.get('database', 'user')
+aur_db_pass = config.get('database', 'password')
+aur_db_socket = config.get('database', 'socket')
+
+key_prefixes = config.get('auth', 'key-prefixes').split()
+username_regex = config.get('auth', 'username-regex')
+git_serve_cmd = config.get('auth', 'git-serve-cmd')
+ssh_opts = config.get('auth', 'ssh-options')
+
+pubkey = os.environ.get("SSH_KEY")
+valid_prefixes = tuple(p + " " for p in key_prefixes)
+if pubkey is None or not pubkey.startswith(valid_prefixes):
+    exit(1)
+
+db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                             passwd=aur_db_pass, db=aur_db_name,
+                             unix_socket=aur_db_socket, buffered=True)
+
+cur = db.cursor()
+cur.execute("SELECT Username FROM Users WHERE SSHPubKey = %s " +
+            "AND Suspended = 0", (pubkey,))
+
+if cur.rowcount != 1:
+    exit(1)
+
+user = cur.fetchone()[0]
+if not re.match(username_regex, user):
+    exit(1)
+
+print('command="%s %s",%s %s' % (git_serve_cmd, user, ssh_opts, pubkey))
diff --git a/scripts/git-integration/git-serve.py b/scripts/git-integration/git-serve.py
new file mode 100755
index 00000000..227e37b9
--- /dev/null
+++ b/scripts/git-integration/git-serve.py
@@ -0,0 +1,157 @@
+#!/usr/bin/python3
+
+import configparser
+import mysql.connector
+import os
+import pygit2
+import re
+import shlex
+import sys
+
+config = configparser.RawConfigParser()
+config.read(os.path.dirname(os.path.realpath(__file__)) + "/../../conf/config")
+
+aur_db_host = config.get('database', 'host')
+aur_db_name = config.get('database', 'name')
+aur_db_user = config.get('database', 'user')
+aur_db_pass = config.get('database', 'password')
+aur_db_socket = config.get('database', 'socket')
+
+repo_base_path = config.get('serve', 'repo-base')
+repo_regex = config.get('serve', 'repo-regex')
+git_update_hook = config.get('serve', 'git-update-hook')
+git_shell_cmd = config.get('serve', 'git-shell-cmd')
+ssh_cmdline = config.get('serve', 'ssh-cmdline')
+
+def repo_path_validate(path):
+    if not path.startswith(repo_base_path):
+        return False
+    if path.endswith('.git'):
+        repo = path[len(repo_base_path):-4]
+    elif path.endswith('.git/'):
+        repo = path[len(repo_base_path):-5]
+    else:
+        return False
+    return re.match(repo_regex, repo)
+
+def repo_path_get_pkgbase(path):
+    pkgbase = path.rstrip('/').rpartition('/')[2]
+    if pkgbase.endswith('.git'):
+        pkgbase = pkgbase[:-4]
+    return pkgbase
+
+def list_repos(user):
+    db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                                 passwd=aur_db_pass, db=aur_db_name,
+                                 unix_socket=aur_db_socket)
+    cur = db.cursor()
+
+    cur.execute("SELECT ID FROM Users WHERE Username = %s ", [user])
+    userid = cur.fetchone()[0]
+    if userid == 0:
+        die('%s: unknown user: %s' % (action, user))
+
+    cur.execute("SELECT Name, PackagerUID FROM PackageBases " +
+                "WHERE MaintainerUID = %s ", [userid])
+    for row in cur:
+        print((' ' if row[1] else '*') + row[0])
+    db.close()
+
+def setup_repo(repo, user):
+    if not re.match(repo_regex, repo):
+        die('%s: invalid repository name: %s' % (action, repo))
+
+    db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                                 passwd=aur_db_pass, db=aur_db_name,
+                                 unix_socket=aur_db_socket)
+    cur = db.cursor()
+
+    cur.execute("SELECT COUNT(*) FROM PackageBases WHERE Name = %s ", [repo])
+    if cur.fetchone()[0] > 0:
+        die('%s: package base already exists: %s' % (action, repo))
+
+    cur.execute("SELECT ID FROM Users WHERE Username = %s ", [user])
+    userid = cur.fetchone()[0]
+    if userid == 0:
+        die('%s: unknown user: %s' % (action, user))
+
+    cur.execute("INSERT INTO PackageBases (Name, SubmittedTS, ModifiedTS, " +
+                "SubmitterUID, MaintainerUID) VALUES (%s, UNIX_TIMESTAMP(), " +
+                "UNIX_TIMESTAMP(), %s, %s)", [repo, userid, userid])
+
+    db.commit()
+    db.close()
+
+    repo_path = repo_base_path + '/' + repo + '.git/'
+    pygit2.init_repository(repo_path, True)
+    os.symlink(git_update_hook, repo_path + 'hooks/update')
+
+def check_permissions(pkgbase, user):
+    db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                                 passwd=aur_db_pass, db=aur_db_name,
+                                 unix_socket=aur_db_socket, buffered=True)
+    cur = db.cursor()
+
+    cur.execute("SELECT AccountTypeID FROM Users WHERE UserName = %s ", [user])
+    if cur.fetchone()[0] > 1:
+        return True
+
+    cur.execute("SELECT COUNT(*) FROM PackageBases " +
+                "LEFT JOIN PackageComaintainers " +
+                "ON PackageComaintainers.PackageBaseID = PackageBases.ID " +
+                "INNER JOIN Users ON Users.ID = PackageBases.MaintainerUID " +
+                "OR PackageBases.MaintainerUID IS NULL " +
+                "OR Users.ID = PackageComaintainers.UsersID " +
+                "WHERE Name = %s AND Username = %s", [pkgbase, user])
+    return cur.fetchone()[0] > 0
+
+def die(msg):
+    sys.stderr.write("%s\n" % (msg))
+    exit(1)
+
+def die_with_help(msg):
+    die(msg + "\nTry `%s help` for a list of commands." % (ssh_cmdline))
+
+user = sys.argv[1]
+cmd = os.environ.get("SSH_ORIGINAL_COMMAND")
+if not cmd:
+    die_with_help("Interactive shell is disabled.")
+cmdargv = shlex.split(cmd)
+action = cmdargv[0]
+
+if action == 'git-upload-pack' or action == 'git-receive-pack':
+    if len(cmdargv) < 2:
+        die_with_help("%s: missing path" % (action))
+    path = repo_base_path.rstrip('/') + cmdargv[1]
+    if not repo_path_validate(path):
+        die('%s: invalid path: %s' % (action, path))
+    pkgbase = repo_path_get_pkgbase(path)
+    if not os.path.exists(path):
+        setup_repo(pkgbase, user)
+    if action == 'git-receive-pack':
+        if not check_permissions(pkgbase, user):
+            die('%s: permission denied: %s' % (action, user))
+    os.environ["AUR_USER"] = user
+    os.environ["AUR_GIT_DIR"] = path
+    os.environ["AUR_PKGBASE"] = pkgbase
+    cmd = action + " '" + path + "'"
+    os.execl(git_shell_cmd, git_shell_cmd, '-c', cmd)
+elif action == 'list-repos':
+    if len(cmdargv) > 1:
+        die_with_help("%s: too many arguments" % (action))
+    list_repos(user)
+elif action == 'setup-repo':
+    if len(cmdargv) < 2:
+        die_with_help("%s: missing repository name" % (action))
+    if len(cmdargv) > 2:
+        die_with_help("%s: too many arguments" % (action))
+    setup_repo(cmdargv[1], user)
+elif action == 'help':
+    die("Commands:\n" +
+        "  help               Show this help message and exit.\n" +
+        "  list-repos         List all your repositories.\n" +
+        "  setup-repo <name>  Create an empty repository.\n" +
+        "  git-receive-pack   Internal command used with Git.\n" +
+        "  git-upload-pack    Internal command used with Git.")
+else:
+    die_with_help("invalid command: %s" % (action))
diff --git a/scripts/git-integration/git-update.py b/scripts/git-integration/git-update.py
new file mode 100755
index 00000000..3d2742a2
--- /dev/null
+++ b/scripts/git-integration/git-update.py
@@ -0,0 +1,246 @@
+#!/usr/bin/python3
+
+from copy import copy, deepcopy
+import configparser
+import mysql.connector
+import os
+import pygit2
+import re
+import sys
+
+import aurinfo
+
+config = configparser.RawConfigParser()
+config.read(os.path.dirname(os.path.realpath(__file__)) + "/../../conf/config")
+
+aur_db_host = config.get('database', 'host')
+aur_db_name = config.get('database', 'name')
+aur_db_user = config.get('database', 'user')
+aur_db_pass = config.get('database', 'password')
+aur_db_socket = config.get('database', 'socket')
+
+def extract_arch_fields(pkginfo, field):
+    values = []
+
+    if field in pkginfo:
+        for val in pkginfo[field]:
+            values.append({"value": val, "arch": None})
+
+    for arch in ['i686', 'x86_64']:
+        if field + '_' + arch in pkginfo:
+            for val in pkginfo[field + '_' + arch]:
+                values.append({"value": val, "arch": arch})
+
+    return values
+
+def save_srcinfo(srcinfo, db, cur, user):
+    # Obtain package base ID and previous maintainer.
+    pkgbase = srcinfo._pkgbase['pkgname']
+    cur.execute("SELECT ID, MaintainerUID FROM PackageBases "
+                "WHERE Name = %s", [pkgbase])
+    (pkgbase_id, maintainer_uid) = cur.fetchone()
+    was_orphan = not maintainer_uid
+
+    # Obtain the user ID of the new maintainer.
+    cur.execute("SELECT ID FROM Users WHERE Username = %s", [user])
+    user_id = int(cur.fetchone()[0])
+
+    # Update package base details and delete current packages.
+    cur.execute("UPDATE PackageBases SET ModifiedTS = UNIX_TIMESTAMP(), " +
+                "PackagerUID = %s, OutOfDateTS = NULL WHERE ID = %s",
+                [user_id, pkgbase_id])
+    cur.execute("UPDATE PackageBases SET MaintainerUID = %s " +
+                "WHERE ID = %s AND MaintainerUID IS NULL",
+                [user_id, pkgbase_id])
+    cur.execute("DELETE FROM Packages WHERE PackageBaseID = %s",
+                [pkgbase_id])
+
+    for pkgname in srcinfo.GetPackageNames():
+        pkginfo = srcinfo.GetMergedPackage(pkgname)
+
+        if 'epoch' in pkginfo and int(pkginfo['epoch']) > 0:
+            ver = '%d:%s-%s' % (int(pkginfo['epoch']), pkginfo['pkgver'],
+                                pkginfo['pkgrel'])
+        else:
+            ver = '%s-%s' % (pkginfo['pkgver'], pkginfo['pkgrel'])
+
+        # Create a new package.
+        cur.execute("INSERT INTO Packages (PackageBaseID, Name, " +
+                    "Version, Description, URL) " +
+                    "VALUES (%s, %s, %s, %s, %s)",
+                    [pkgbase_id, pkginfo['pkgname'], ver,
+                     pkginfo['pkgdesc'], pkginfo['url']])
+        db.commit()
+        pkgid = cur.lastrowid
+
+        # Add package sources.
+        for source_info in extract_arch_fields(pkginfo, 'source'):
+            cur.execute("INSERT INTO PackageSources (PackageID, Source, " +
+                        "SourceArch) VALUES (%s, %s, %s)",
+                        [pkgid, source_info['value'], source_info['arch']])
+
+        # Add package dependencies.
+        for deptype in ('depends', 'makedepends',
+                        'checkdepends', 'optdepends'):
+            cur.execute("SELECT ID FROM DependencyTypes WHERE Name = %s",
+                        [deptype])
+            deptypeid = cur.fetchone()[0]
+            for dep_info in extract_arch_fields(pkginfo, deptype):
+                depname = re.sub(r'(<|=|>).*', '', dep_info['value'])
+                depcond = dep_info['value'][len(depname):]
+                deparch = dep_info['arch']
+                cur.execute("INSERT INTO PackageDepends (PackageID, " +
+                            "DepTypeID, DepName, DepCondition, DepArch) " +
+                            "VALUES (%s, %s, %s, %s, %s)",
+                            [pkgid, deptypeid, depname, depcond, deparch])
+
+        # Add package relations (conflicts, provides, replaces).
+        for reltype in ('conflicts', 'provides', 'replaces'):
+            cur.execute("SELECT ID FROM RelationTypes WHERE Name = %s",
+                        [reltype])
+            reltypeid = cur.fetchone()[0]
+            for rel_info in extract_arch_fields(pkginfo, reltype):
+                relname = re.sub(r'(<|=|>).*', '', rel_info['value'])
+                relcond = rel_info['value'][len(relname):]
+                relarch = rel_info['arch']
+                cur.execute("INSERT INTO PackageRelations (PackageID, " +
+                            "RelTypeID, RelName, RelCondition, RelArch) " +
+                            "VALUES (%s, %s, %s, %s, %s)",
+                            [pkgid, reltypeid, relname, relcond, relarch])
+
+        # Add package licenses.
+        if 'license' in pkginfo:
+            for license in pkginfo['license']:
+                cur.execute("SELECT ID FROM Licenses WHERE Name = %s",
+                            [license])
+                if cur.rowcount == 1:
+                    licenseid = cur.fetchone()[0]
+                else:
+                    cur.execute("INSERT INTO Licenses (Name) VALUES (%s)",
+                                [license])
+                    db.commit()
+                    licenseid = cur.lastrowid
+                cur.execute("INSERT INTO PackageLicenses (PackageID, " +
+                            "LicenseID) VALUES (%s, %s)",
+                            [pkgid, licenseid])
+
+        # Add package groups.
+        if 'groups' in pkginfo:
+            for group in pkginfo['groups']:
+                cur.execute("SELECT ID FROM Groups WHERE Name = %s",
+                            [group])
+                if cur.rowcount == 1:
+                    groupid = cur.fetchone()[0]
+                else:
+                    cur.execute("INSERT INTO Groups (Name) VALUES (%s)",
+                                [group])
+                    db.commit()
+                    groupid = cur.lastrowid
+                cur.execute("INSERT INTO PackageGroups (PackageID, "
+                            "GroupID) VALUES (%s, %s)", [pkgid, groupid])
+
+    # Add user to notification list on adoption.
+    if was_orphan:
+        cur.execute("INSERT INTO CommentNotify (PackageBaseID, UserID) " +
+                    "VALUES (%s, %s)", [pkgbase_id, user_id])
+
+    db.commit()
+
+def die(msg):
+    sys.stderr.write("error: %s\n" % (msg))
+    exit(1)
+
+def die_commit(msg, commit):
+    sys.stderr.write("error: The following error " +
+                     "occurred when parsing commit\n")
+    sys.stderr.write("error: %s:\n" % (commit))
+    sys.stderr.write("error: %s\n" % (msg))
+    exit(1)
+
+if len(sys.argv) != 4:
+    die("invalid arguments")
+
+refname = sys.argv[1]
+sha1_old = sys.argv[2]
+sha1_new = sys.argv[3]
+
+user = os.environ.get("AUR_USER")
+pkgbase = os.environ.get("AUR_PKGBASE")
+git_dir = os.environ.get("AUR_GIT_DIR")
+
+if refname != "refs/heads/master":
+    die("pushing to a branch other than master is restricted")
+
+repo = pygit2.Repository(git_dir)
+walker = repo.walk(sha1_new, pygit2.GIT_SORT_TOPOLOGICAL)
+if sha1_old != "0000000000000000000000000000000000000000":
+    walker.hide(sha1_old)
+
+db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                             passwd=aur_db_pass, db=aur_db_name,
+                             unix_socket=aur_db_socket, buffered=True)
+cur = db.cursor()
+
+cur.execute("SELECT Name FROM PackageBlacklist")
+blacklist = [row[0] for row in cur.fetchall()]
+
+for commit in walker:
+    if not '.SRCINFO' in commit.tree:
+        die_commit("missing .SRCINFO", commit.id)
+
+    for treeobj in commit.tree:
+        if repo[treeobj.id].size > 100000:
+            die_commit("maximum blob size (100kB) exceeded", commit.id)
+
+    srcinfo_raw = repo[commit.tree['.SRCINFO'].id].data.decode()
+    srcinfo_raw = srcinfo_raw.split('\n')
+    ecatcher = aurinfo.CollectionECatcher()
+    srcinfo = aurinfo.ParseAurinfoFromIterable(srcinfo_raw, ecatcher)
+    errors = ecatcher.Errors()
+    if errors:
+        sys.stderr.write("error: The following errors occurred "
+                         "when parsing .SRCINFO in commit\n")
+        sys.stderr.write("error: %s:\n" % (commit.id))
+        for error in errors:
+            sys.stderr.write("error: line %d: %s\n" % error)
+        exit(1)
+
+    srcinfo_pkgbase = srcinfo._pkgbase['pkgname']
+    if srcinfo_pkgbase != pkgbase:
+        die_commit('invalid pkgbase: %s' % (srcinfo_pkgbase), commit.id)
+
+    for pkgname in srcinfo.GetPackageNames():
+        pkginfo = srcinfo.GetMergedPackage(pkgname)
+
+        if 'epoch' in pkginfo and not pkginfo['epoch'].isdigit():
+            die_commit('invalid epoch: %s' % (pkginfo['epoch']), commit.id)
+
+        if not re.match(r'[a-z0-9][a-z0-9\.+_-]*$', pkginfo['pkgname']):
+            die_commit('invalid package name: %s' % (pkginfo['pkgname']),
+                       commit.id)
+
+        if pkginfo['pkgname'] in blacklist:
+            die_commit('package is blacklisted: %s' % (pkginfo['pkgname']),
+                       commit.id)
+
+        if not re.match(r'(?:http|ftp)s?://.*', pkginfo['url']):
+            die_commit('invalid URL: %s' % (pkginfo['url']), commit.id)
+
+        for field in ('pkgname', 'pkgdesc', 'url'):
+            if len(pkginfo[field]) > 255:
+                die_commit('%s field too long: %s' % (field, pkginfo[field]),
+                           commit.id)
+
+srcinfo_raw = repo[repo[sha1_new].tree['.SRCINFO'].id].data.decode()
+srcinfo_raw = srcinfo_raw.split('\n')
+srcinfo = aurinfo.ParseAurinfoFromIterable(srcinfo_raw)
+
+save_srcinfo(srcinfo, db, cur, user)
+
+db.close()
+
+pkglist = list(srcinfo.GetPackageNames())
+if len(pkglist) > 0:
+    with open(git_dir + '/description', 'w') as f:
+        pkginfo = srcinfo.GetMergedPackage(pkglist[0])
+        f.write(pkginfo['pkgdesc'])
diff --git a/scripts/git-integration/init-repos.py b/scripts/git-integration/init-repos.py
new file mode 100755
index 00000000..62c51b1b
--- /dev/null
+++ b/scripts/git-integration/init-repos.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python3
+
+import configparser
+import mysql.connector
+import os
+import pygit2
+import re
+import shlex
+import sys
+
+config = configparser.RawConfigParser()
+config.read(os.path.dirname(os.path.realpath(__file__)) + "/../../conf/config")
+
+aur_db_host = config.get('database', 'host')
+aur_db_name = config.get('database', 'name')
+aur_db_user = config.get('database', 'user')
+aur_db_pass = config.get('database', 'password')
+aur_db_socket = config.get('database', 'socket')
+
+repo_base_path = config.get('serve', 'repo-base')
+repo_regex = config.get('serve', 'repo-regex')
+git_update_hook = config.get('serve', 'git-update-hook')
+
+def die(msg):
+    sys.stderr.write("%s\n" % (msg))
+    exit(1)
+
+db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                             passwd=aur_db_pass, db=aur_db_name,
+                             unix_socket=aur_db_socket)
+cur = db.cursor()
+
+cur.execute("SELECT Name FROM PackageBases")
+repos = [row[0] for row in cur]
+db.close()
+
+for repo in repos:
+    if not re.match(repo_regex, repo):
+        die('invalid repository name: %s' % (repo))
+
+i = 1
+n = len(repos)
+
+for repo in repos:
+    print("[%s/%d] %s" % (str(i).rjust(len(str(n))), n, repo))
+
+    repo_path = repo_base_path + '/' + repo + '.git/'
+    pygit2.init_repository(repo_path, True)
+    os.symlink(git_update_hook, repo_path + 'hooks/update')
+
+    i += 1
diff --git a/scripts/git-integration/sshd_config b/scripts/git-integration/sshd_config
new file mode 100644
index 00000000..fbe35789
--- /dev/null
+++ b/scripts/git-integration/sshd_config
@@ -0,0 +1,6 @@
+Port 2222
+HostKey ~/.ssh/ssh_host_rsa_key
+PasswordAuthentication no
+UsePrivilegeSeparation no
+AuthorizedKeysCommand /srv/http/aur/scripts/git-integration/git-auth.py
+AuthorizedKeysCommandUser aur
diff --git a/scripts/uploadbuckets.sh b/scripts/uploadbuckets.sh
deleted file mode 100755
index 32526926..00000000
--- a/scripts/uploadbuckets.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/bash
-
-DRYRUN=${DRYRUN:-1}
-
-source="$1"
-dest="$2"
-
-if [[ -z $source || -z $dest ]]; then
-	echo 'usage: uploadbuckets.sh <source> <dest>'
-	echo 'Script runs in DRYRUN mode by default.'
-	echo 'To run for real, set DRYRUN=0 in your environment.'
-	exit 1
-fi
-
-if [[ ! -d $source ]]; then
-	echo 'error: source is not a directory'
-	exit 1
-fi
-
-if [[ -e $dest && ! -d $dest ]]; then
-	echo 'error: dest is not a directory'
-	exit 1
-fi
-
-if [[ $(readlink -e $dest) = $(readlink -e $source) ]]; then
-	echo 'error: source and dest cannot be the same. Rotate the result'
-	echo 'into place once the migration is complete.'
-	exit 1
-fi
-
-if [[ ! -d $dest ]]; then
-	mkdir $dest
-fi
-
-shopt -s nullglob
-
-for package in "$source"/*; do
-	pkgname="${package##*/}"
-	newfolder="$dest/${pkgname:0:2}"
-	if [[ ! -d "$newfolder" ]]; then
-		if [[ $DRYRUN -gt 0 ]]; then
-			echo mkdir -p "$newfolder"
-		else
-			mkdir -p "$newfolder"
-		fi
-	fi
-	if [[ $DRYRUN -gt 0 ]]; then
-		echo mv "$source/$pkgname" "$newfolder/$pkgname"
-	else
-		mv "$source/$pkgname" "$newfolder/$pkgname"
-	fi
-done
-
-if [[ $DRYRUN -gt 0 ]]; then
-	echo
-	echo 'DRYRUN mode was enabled.'
-	echo 'To run for real, set DRYRUN=0 in your environment.'
-fi