author     Lukas Fleischer <archlinux@cryptocrack.de>    2014-06-05 17:19:19 +0200
committer  Lukas Fleischer <archlinux@cryptocrack.de>    2014-12-27 12:42:12 +0100
commit     943b6bc9769a03e967f08d8b46bd975e2e8d2a20
tree       08c5a373cd26102a5a130754dbc14383c7f42f17 /scripts
parent     ad17b9e2b4bebcf744129ed5a1a2c6e544d42739
download   aurweb-943b6bc9769a03e967f08d8b46bd975e2e8d2a20.tar.xz
Add update hook template
This adds a script that can be used as an update hook to check all commits
for validity and to regenerate the package details page before updating a
named ref.

Signed-off-by: Lukas Fleischer <archlinux@cryptocrack.de>
Diffstat (limited to 'scripts')
-rw-r--r--  scripts/git-integration/aurinfo.py     204
-rwxr-xr-x  scripts/git-integration/git-serve.py     2
-rwxr-xr-x  scripts/git-integration/git-update.py  212
3 files changed, 418 insertions(+), 0 deletions(-)
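
For context: git runs a repository's hooks/update script once for every ref that is about to be updated and passes it three arguments (the ref name, the old object ID and the new object ID); a non-zero exit status makes git refuse the update. On top of that, the hook added below expects the pushing user, the package base name and the repository path in the AUR_USER, AUR_PKGBASE and AUR_GIT_DIR environment variables; exporting those is assumed to happen on the git-serve side and is not part of this patch. A sketch of the invocation the hook has to handle, for illustration only:

    hooks/update refs/heads/master <old-sha1> <new-sha1>

with AUR_USER, AUR_PKGBASE and AUR_GIT_DIR set in the hook's environment.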
diff --git a/scripts/git-integration/aurinfo.py b/scripts/git-integration/aurinfo.py
new file mode 100644
index 0000000..d9b9372
--- /dev/null
+++ b/scripts/git-integration/aurinfo.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+
+from copy import copy, deepcopy
+import pprint
+import sys
+
+class Attr(object):
+    def __init__(self, name, is_multivalued=False, allow_arch_extensions=False):
+        self.name = name
+        self.is_multivalued = is_multivalued
+        self.allow_arch_extensions = allow_arch_extensions
+
+PKGBUILD_ATTRIBUTES = {
+    'arch': Attr('arch', True),
+    'backup': Attr('backup', True),
+    'changelog': Attr('changelog', False),
+    'checkdepends': Attr('checkdepends', True),
+    'conflicts': Attr('conflicts', True, True),
+    'depends': Attr('depends', True, True),
+    'epoch': Attr('epoch', False),
+    'groups': Attr('groups', True),
+    'install': Attr('install', False),
+    'license': Attr('license', True),
+    'makedepends': Attr('makedepends', True, True),
+    'md5sums': Attr('md5sums', True, True),
+    'noextract': Attr('noextract', True),
+    'optdepends': Attr('optdepends', True, True),
+    'options': Attr('options', True),
+    'pkgname': Attr('pkgname', False),
+    'pkgrel': Attr('pkgrel', False),
+    'pkgver': Attr('pkgver', False),
+    'provides': Attr('provides', True, True),
+    'replaces': Attr('replaces', True, True),
+    'sha1sums': Attr('sha1sums', True, True),
+    'sha224sums': Attr('sha224sums', True, True),
+    'sha256sums': Attr('sha256sums', True, True),
+    'sha384sums': Attr('sha384sums', True, True),
+    'sha512sums': Attr('sha512sums', True, True),
+    'source': Attr('source', True, True),
+    'url': Attr('url', False),
+    'validpgpkeys': Attr('validpgpkeys', True),
+}
+
+def find_attr(attrname):
+    # exact match
+    attr = PKGBUILD_ATTRIBUTES.get(attrname, None)
+    if attr:
+        return attr
+
+    # prefix match
+    # XXX: this could break in the future if PKGBUILD(5) ever
+    # introduces a key which is a subset of another.
+    for k in PKGBUILD_ATTRIBUTES.keys():
+        if attrname.startswith(k + '_'):
+            return PKGBUILD_ATTRIBUTES[k]
+
+def IsMultiValued(attrname):
+    attr = find_attr(attrname)
+    return attr and attr.is_multivalued
+
+class AurInfo(object):
+    def __init__(self):
+        self._pkgbase = {}
+        self._packages = {}
+
+    def GetPackageNames(self):
+        return self._packages.keys()
+
+    def GetMergedPackage(self, pkgname):
+        package = deepcopy(self._pkgbase)
+        package['pkgname'] = pkgname
+        for k, v in self._packages.get(pkgname).items():
+            package[k] = deepcopy(v)
+        return package
+
+    def AddPackage(self, pkgname):
+        self._packages[pkgname] = {}
+        return self._packages[pkgname]
+
+    def SetPkgbase(self, pkgbasename):
+        self._pkgbase = {'pkgname' : pkgbasename}
+        return self._pkgbase
+
+
+class StderrECatcher(object):
+    def Catch(self, lineno, error):
+        print('ERROR[%d]: %s' % (lineno, error), file=sys.stderr)
+
+
+class CollectionECatcher(object):
+    def __init__(self):
+        self._errors = []
+
+    def Catch(self, lineno, error):
+        self._errors.append((lineno, error))
+
+    def HasErrors(self):
+        return len(self._errors) > 0
+
+    def Errors(self):
+        return copy(self._errors)
+
+
+def ParseAurinfoFromIterable(iterable, ecatcher=None):
+    aurinfo = AurInfo()
+
+    if ecatcher is None:
+        ecatcher = StderrECatcher()
+
+    current_package = None
+    lineno = 0
+
+    for line in iterable:
+        lineno += 1
+
+        if line.startswith('#'):
+            continue
+
+        if not line.strip():
+            # end of package
+            current_package = None
+            continue
+
+        if not line.startswith('\t'):
+            # start of new package
+            try:
+                key, value = map(str.strip, line.split('=', 1))
+            except ValueError:
+                ecatcher.Catch(lineno, 'unexpected header format in section=%s' %
+                               current_package['pkgname'])
+                continue
+
+            if key == 'pkgbase':
+                current_package = aurinfo.SetPkgbase(value)
+            else:
+                current_package = aurinfo.AddPackage(value)
+        else:
+            # package attribute
+            if current_package is None:
+                ecatcher.Catch(lineno, 'package attribute found outside of '
+                               'a package section')
+                continue
+
+            try:
+                key, value = map(str.strip, line.split('=', 1))
+            except ValueError:
+                ecatcher.Catch(lineno, 'unexpected attribute format in '
+                               'section=%s' % current_package['pkgname'])
+                continue
+
+            if IsMultiValued(key):
+                if not current_package.get(key):
+                    current_package[key] = []
+                if value:
+                    current_package[key].append(value)
+            else:
+                if not current_package.get(key):
+                    current_package[key] = value
+                else:
+                    ecatcher.Catch(lineno, 'overwriting attribute '
+                                   '%s: %s -> %s' % (key, current_package[key],
+                                                     value))
+
+    return aurinfo
+
+
+def ParseAurinfo(filename='.AURINFO', ecatcher=None):
+    with open(filename) as f:
+        return ParseAurinfoFromIterable(f, ecatcher)
+
+
+def ValidateAurinfo(filename='.AURINFO'):
+    ecatcher = CollectionECatcher()
+    ParseAurinfo(filename, ecatcher)
+    errors = ecatcher.Errors()
+    for error in errors:
+        print('error on line %d: %s' % error, file=sys.stderr)
+    return not errors
+
+
+if __name__ == '__main__':
+    pp = pprint.PrettyPrinter(indent=4)
+
+    if len(sys.argv) == 1:
+        print('error: not enough arguments')
+        sys.exit(1)
+    elif len(sys.argv) == 2:
+        action = sys.argv[1]
+        filename = '.AURINFO'
+    else:
+        action, filename = sys.argv[1:3]
+
+    if action == 'parse':
+        aurinfo = ParseAurinfo(filename)
+        for pkgname in aurinfo.GetPackageNames():
+            print(">>> merged package: %s" % pkgname)
+            pp.pprint(aurinfo.GetMergedPackage(pkgname))
+            print()
+    elif action == 'validate':
+        sys.exit(not ValidateAurinfo(filename))
+    else:
+        print('unknown action: %s' % action)
+        sys.exit(1)
+
+# vim: set et ts=4 sw=4:
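
For orientation, ParseAurinfoFromIterable() accepts the usual .AURINFO/.SRCINFO layout: a line starting with '#' is a comment, an unindented 'pkgbase = ...' or 'pkgname = ...' line opens a section, tab-indented 'key = value' lines attach attributes to the current section, and a blank line closes it. GetMergedPackage() then overlays each package section on top of the pkgbase section. A hypothetical input, for illustration only:

pkgbase = foo
	pkgver = 1.0
	pkgrel = 1
	url = https://example.org/foo
	arch = any
	license = MIT

pkgname = foo
	pkgdesc = An example package

Running "aurinfo.py parse" in a directory containing such a .AURINFO pretty-prints the merged dictionary for every package (multi-valued attributes such as arch and license come back as lists); "aurinfo.py validate [file]" exits non-zero if any parse errors were collected.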
diff --git a/scripts/git-integration/git-serve.py b/scripts/git-integration/git-serve.py
index 08132f2..e621677 100755
--- a/scripts/git-integration/git-serve.py
+++ b/scripts/git-integration/git-serve.py
@@ -18,6 +18,7 @@ aur_db_pass = config.get('database', 'password')
 repo_base_path = config.get('serve', 'repo-base')
 repo_regex = config.get('serve', 'repo-regex')
+git_update_hook = config.get('serve', 'git-update-hook')
 git_shell_cmd = config.get('serve', 'git-shell-cmd')
 def repo_path_validate(path):
@@ -60,6 +61,7 @@ def setup_repo(repo, user):
     repo_path = repo_base_path + '/' + repo + '.git/'
     pygit2.init_repository(repo_path, True)
+    os.symlink(git_update_hook, repo_path + 'hooks/update')
 def check_permissions(pkgbase, user):
     db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
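
The change to git-serve.py above reads a new git-update-hook option from the [serve] section of the config file and symlinks it into every newly initialized repository as hooks/update, so the checks in git-update.py below run on each push. A sketch of the corresponding config entry; the path shown is only an example:

    [serve]
    git-update-hook = /path/to/aurweb/scripts/git-integration/git-update.py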
diff --git a/scripts/git-integration/git-update.py b/scripts/git-integration/git-update.py
new file mode 100755
index 0000000..c20eede
--- /dev/null
+++ b/scripts/git-integration/git-update.py
@@ -0,0 +1,212 @@
+#!/usr/bin/python3
+
+from copy import copy, deepcopy
+import configparser
+import mysql.connector
+import os
+import pygit2
+import re
+import sys
+
+import aurinfo
+
+config = configparser.RawConfigParser()
+config.read(os.path.dirname(os.path.realpath(__file__)) + "/../../conf/config")
+
+aur_db_host = config.get('database', 'host')
+aur_db_name = config.get('database', 'name')
+aur_db_user = config.get('database', 'user')
+aur_db_pass = config.get('database', 'password')
+
+def save_srcinfo(srcinfo, db, cur, user):
+    # Obtain package base ID and previous maintainer.
+    pkgbase = srcinfo._pkgbase['pkgname']
+    cur.execute("SELECT ID, MaintainerUID FROM PackageBases "
+                "WHERE Name = %s", [pkgbase])
+    (pkgbase_id, maintainer_uid) = cur.fetchone()
+    was_orphan = not maintainer_uid
+
+    # Obtain the user ID of the new maintainer.
+    cur.execute("SELECT ID FROM Users WHERE Username = %s", [user])
+    user_id = int(cur.fetchone()[0])
+
+    # Update package base details and delete current packages.
+    cur.execute("UPDATE PackageBases SET ModifiedTS = UNIX_TIMESTAMP(), " +
+                "MaintainerUID = %s, PackagerUID = %s, " +
+                "OutOfDateTS = NULL WHERE ID = %s",
+                [user_id, user_id, pkgbase_id])
+    cur.execute("DELETE FROM Packages WHERE PackageBaseID = %s",
+                [pkgbase_id])
+
+    for pkgname in srcinfo.GetPackageNames():
+        pkginfo = srcinfo.GetMergedPackage(pkgname)
+
+        if 'epoch' in pkginfo and int(pkginfo['epoch']) > 0:
+            ver = '%d:%s-%s' % (int(pkginfo['epoch']), pkginfo['pkgver'],
+                                pkginfo['pkgrel'])
+        else:
+            ver = '%s-%s' % (pkginfo['pkgver'], pkginfo['pkgrel'])
+
+        # Create a new package.
+        cur.execute("INSERT INTO Packages (PackageBaseID, Name, " +
+                    "Version, Description, URL) " +
+                    "VALUES (%s, %s, %s, %s, %s)",
+                    [pkgbase_id, pkginfo['pkgname'], ver,
+                     pkginfo['pkgdesc'], pkginfo['url']])
+        db.commit()
+        pkgid = cur.lastrowid
+
+        # Add package sources.
+        for source in pkginfo['source']:
+            cur.execute("INSERT INTO PackageSources (PackageID, Source) " +
+                        "VALUES (%s, %s)", [pkgid, source])
+
+        # Add package dependencies.
+        for deptype in ('depends', 'makedepends',
+                        'checkdepends', 'optdepends'):
+            if deptype not in pkginfo:
+                continue
+            cur.execute("SELECT ID FROM DependencyTypes WHERE Name = %s",
+                        [deptype])
+            deptypeid = cur.fetchone()[0]
+            for dep in pkginfo[deptype]:
+                depname = re.sub(r'(<|=|>).*', '', dep)
+                depcond = dep[len(depname):]
+                cur.execute("INSERT INTO PackageDepends (PackageID, " +
+                            "DepTypeID, DepName, DepCondition) " +
+                            "VALUES (%s, %s, %s, %s)", [pkgid, deptypeid,
+                            depname, depcond])
+
+        # Add package relations (conflicts, provides, replaces).
+        for reltype in ('conflicts', 'provides', 'replaces'):
+            if reltype not in pkginfo:
+                continue
+            cur.execute("SELECT ID FROM RelationTypes WHERE Name = %s",
+                        [reltype])
+            reltypeid = cur.fetchone()[0]
+            for rel in pkginfo[reltype]:
+                relname = re.sub(r'(<|=|>).*', '', rel)
+                relcond = rel[len(relname):]
+                cur.execute("INSERT INTO PackageRelations (PackageID, " +
+                            "RelTypeID, RelName, RelCondition) " +
+                            "VALUES (%s, %s, %s, %s)", [pkgid, reltypeid,
+                            relname, relcond])
+
+        # Add package licenses.
+        if 'license' in pkginfo:
+            for license in pkginfo['license']:
+                cur.execute("SELECT ID FROM Licenses WHERE Name = %s",
+                            [license])
+                if cur.rowcount == 1:
+                    licenseid = cur.fetchone()[0]
+                else:
+                    cur.execute("INSERT INTO Licenses (Name) VALUES (%s)",
+                                [license])
+                    db.commit()
+                    licenseid = cur.lastrowid
+                cur.execute("INSERT INTO PackageLicenses (PackageID, " +
+                            "LicenseID) VALUES (%s, %s)",
+                            [pkgid, licenseid])
+
+        # Add package groups.
+        if 'groups' in pkginfo:
+            for group in pkginfo['groups']:
+                cur.execute("SELECT ID FROM Groups WHERE Name = %s",
+                            [group])
+                if cur.rowcount == 1:
+                    groupid = cur.fetchone()[0]
+                else:
+                    cur.execute("INSERT INTO Groups (Name) VALUES (%s)",
+                                [group])
+                    db.commit()
+                    groupid = cur.lastrowid
+                cur.execute("INSERT INTO PackageGroups (PackageID, "
+                            "GroupID) VALUES (%s, %s)", [pkgid, groupid])
+
+    # Add user to notification list on adoption.
+    if was_orphan:
+        cur.execute("INSERT INTO CommentNotify (PackageBaseID, UserID) " +
+                    "VALUES (%s, %s)", [pkgbase_id, user_id])
+
+    db.commit()
+
+def die(msg):
+    sys.stderr.write("error: %s\n" % (msg))
+    exit(1)
+
+def die_commit(msg, commit):
+    sys.stderr.write("error: The following error " +
+                     "occurred when parsing commit\n")
+    sys.stderr.write("error: %s:\n" % (commit))
+    sys.stderr.write("error: %s\n" % (msg))
+    exit(1)
+
+if len(sys.argv) != 4:
+    die("invalid arguments")
+
+refname = sys.argv[1]
+sha1_old = sys.argv[2]
+sha1_new = sys.argv[3]
+
+user = os.environ.get("AUR_USER")
+pkgbase = os.environ.get("AUR_PKGBASE")
+git_dir = os.environ.get("AUR_GIT_DIR")
+
+if refname != "refs/heads/master":
+    die("pushing to a branch other than master is restricted")
+
+repo = pygit2.Repository(git_dir)
+walker = repo.walk(sha1_new, pygit2.GIT_SORT_TOPOLOGICAL)
+if sha1_old != "0000000000000000000000000000000000000000":
+    walker.hide(sha1_old)
+
+for commit in walker:
+    if '.SRCINFO' not in commit.tree:
+        die_commit("missing .SRCINFO", commit.id)
+
+    for treeobj in commit.tree:
+        if repo[treeobj.id].size > 100000:
+            die_commit("maximum blob size (100kB) exceeded", commit.id)
+
+    srcinfo_raw = repo[commit.tree['.SRCINFO'].id].data.decode()
+    srcinfo_raw = srcinfo_raw.split('\n')
+    ecatcher = aurinfo.CollectionECatcher()
+    srcinfo = aurinfo.ParseAurinfoFromIterable(srcinfo_raw, ecatcher)
+    errors = ecatcher.Errors()
+    if errors:
+        sys.stderr.write("error: The following errors occurred "
+                         "when parsing .SRCINFO in commit\n")
+        sys.stderr.write("error: %s:\n" % (commit.id))
+        for error in errors:
+            sys.stderr.write("error: line %d: %s\n" % error)
+        exit(1)
+
+    srcinfo_pkgbase = srcinfo._pkgbase['pkgname']
+    if srcinfo_pkgbase != pkgbase:
+        die_commit('invalid pkgbase: %s' % (srcinfo_pkgbase), commit.id)
+
+    for pkgname in srcinfo.GetPackageNames():
+        pkginfo = srcinfo.GetMergedPackage(pkgname)
+
+        if not re.match(r'[a-z0-9][a-z0-9\.+_-]*$', pkginfo['pkgname']):
+            die_commit('invalid package name: %s' % (pkginfo['pkgname']),
+                       commit.id)
+
+        if not re.match(r'(?:http|ftp)s?://.*', pkginfo['url']):
+            die_commit('invalid URL: %s' % (pkginfo['url']), commit.id)
+
+        for field in ('pkgname', 'pkgdesc', 'url'):
+            if len(pkginfo[field]) > 255:
+                die_commit('%s field too long: %s' % (field, pkginfo[field]),
+                           commit.id)
+
+srcinfo_raw = repo[repo[sha1_new].tree['.SRCINFO'].id].data.decode()
+srcinfo_raw = srcinfo_raw.split('\n')
+srcinfo = aurinfo.ParseAurinfoFromIterable(srcinfo_raw)
+
+db = mysql.connector.connect(host=aur_db_host, user=aur_db_user,
+                             passwd=aur_db_pass, db=aur_db_name,
+                             buffered=True)
+cur = db.cursor()
+save_srcinfo(srcinfo, db, cur, user)
+db.close()
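
One detail of save_srcinfo() worth spelling out: each dependency or relation entry is split into a bare name and an optional version condition before the two columns are inserted. A standalone sketch of that split, using a made-up dependency string:

    import re

    dep = 'glibc>=2.19'                      # hypothetical entry from pkginfo['depends']
    depname = re.sub(r'(<|=|>).*', '', dep)  # 'glibc': drop everything from the first <, = or >
    depcond = dep[len(depname):]             # '>=2.19': the rest becomes DepCondition

The same split is applied to conflicts, provides and replaces entries before they are written to PackageRelations.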