#!/usr/bin/python -O

import re, os, sys, pacman, getopt
import MySQLdb, MySQLdb.connections
import ConfigParser

###########################################################
# Deal with configuration
###########################################################

conffile = '/home/aur/tupkgs.conf'

if not os.path.isfile(conffile):
  print "Error: cannot access config file (" + conffile + ")"
  sys.exit(1)

config = ConfigParser.ConfigParser()
config.read(conffile)

############################################################

# Define some classes we need
class Version:
  def __init__(self):
    self.version = None
    self.file = None

class Package:
  def __init__(self):
    self.name = None
    self.category = None
    self.old = None
    self.new = None
    self.desc = None
    self.url = None
    self.depends = None
    self.sources = None

class PackageDatabase:
  def __init__(self, host, user, password, dbname):
    self.host = host
    self.user = user
    self.password = password
    self.dbname = dbname
    self.connection = MySQLdb.connect(host=host, user=user, passwd=password, db=dbname)
  def cursor(self):
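    # Reconnect if the MySQL connection has gone away (e.g. timed out)
    # before handing out a cursor.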
    try:
      self.connection.ping()
    except MySQLdb.OperationalError:
      self.connection = MySQLdb.connect(host=self.host, user=self.user, passwd=self.password, db=self.dbname)
    return self.connection.cursor()
  def lookup(self, packagename):
    warning("DB: Looking up package: " + packagename)
    q = self.cursor()
    q.execute("SELECT ID FROM Packages WHERE Name = '" + 
               MySQLdb.escape_string(packagename) + "'")
    if (q.rowcount != 0):
      row = q.fetchone()
      return row[0]
    return None
  def getCategoryID(self, package):
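    # Fall back to category ID 1 when the PKGBUILD's category name is not
    # found in PackageCategories.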
    category_id = self.lookupCategory(package.category)
    if (category_id == None):
      category_id = 1
    warning("DB: Got category ID '" + str(category_id) + "' for package '" + package.name + "'")
    return category_id
  def insert(self, package, locationId):
    warning("DB: Inserting package: " + package.name)
    global repo_dir
    q = self.cursor()
    q.execute("INSERT INTO Packages " +
      "(Name, CategoryID, Version, FSPath, LocationID, SubmittedTS, Description, URL) VALUES ('" +
      MySQLdb.escape_string(package.name) + "', " + 
      str(self.getCategoryID(package)) + ", '" +
      MySQLdb.escape_string(package.new.version) + "', '" +
      MySQLdb.escape_string(
        os.path.join(repo_dir, os.path.basename(package.new.file))) + "', " +
      str(locationId) + ", " +
      "UNIX_TIMESTAMP(), '" +
      MySQLdb.escape_string(str(package.desc)) + "', '" +
      MySQLdb.escape_string(str(package.url)) + "')")
    id = self.lookup(package.name)
    self.insertNewInfo(package, id, locationId)    
  def update(self, id, package, locationId):
    warning("DB: Updating package: " + package.name + " with id " + str(id))
    global repo_dir
    q = self.cursor()
    if (self.isdummy(package.name)):
      q.execute("UPDATE Packages SET " +
        "Version = '" + MySQLdb.escape_string(package.new.version) + "', " +
        "CategoryID = " + str(self.getCategoryID(package)) + ", " +
        "FSPath = '" + MySQLdb.escape_string(
          os.path.join(repo_dir, os.path.basename(package.new.file))) + "', " +
        "Description = '" + MySQLdb.escape_string(str(package.desc)) + "', " +
        "DummyPkg = 0, " +
	"SubmittedTS = UNIX_TIMESTAMP(), " +
        "URL = '" + MySQLdb.escape_string(str(package.url)) + "' " +
        "WHERE ID = " + str(id))
    else:
      q.execute("UPDATE Packages SET " +
        "Version = '" + MySQLdb.escape_string(package.new.version) + "', " +
        "CategoryID = " + str(self.getCategoryID(package)) + ", " +
        "FSPath = '" + MySQLdb.escape_string(
          os.path.join(repo_dir, os.path.basename(package.new.file))) + "', " +
        "Description = '" + MySQLdb.escape_string(str(package.desc)) + "', " +
        "ModifiedTS = UNIX_TIMESTAMP(), " +
        "URL = '" + MySQLdb.escape_string(str(package.url)) + "' " +
        "WHERE ID = " + str(id))
    self.insertNewInfo(package, id, locationId)
    # Finally, check whether this package is moving from unsupported to
    # community; if so, reset its maintainer and location.
    q = self.cursor()
    q.execute("SELECT LocationID FROM Packages WHERE ID = " + str(id))
    if (q.rowcount != 0):
      row = q.fetchone()
      if (row[0] != 3):
        q = self.cursor()
	q.execute("UPDATE Packages SET LocationID = 3, MaintainerUID = null WHERE ID = " + str(id))
  def remove(self, id, locationId):
    warning("DB: Removing package with id: " + str(id))
    q = self.cursor()
    q.execute("DELETE FROM Packages WHERE " +
      "LocationID = " + str(locationId) + " AND ID = " + str(id))
  def clearOldInfo(self, id):
    warning("DB: Clearing old info for package with id : " + str(id))
    q = self.cursor()
    q.execute("DELETE FROM PackageContents WHERE PackageID = " + str(id))
    q.execute("DELETE FROM PackageDepends WHERE PackageID = " + str(id))
    q.execute("DELETE FROM PackageSources WHERE PackageID = " + str(id))
  def lookupOrDummy(self, packagename):
    retval = self.lookup(packagename)
    if (retval != None):
      return retval
    return self.createDummy(packagename)
  def lookupCategory(self, categoryname):
    warning("DB: Looking up category: " + categoryname)
    q = self.cursor()
    q.execute("SELECT ID from PackageCategories WHERE Category = '" + MySQLdb.escape_string(categoryname) + "'")
    if (q.rowcount != 0):
      row = q.fetchone()
      return row[0]
    return None
  def createDummy(self, packagename):
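    # Insert a placeholder row for a package (typically a dependency) that is
    # not yet present in the Packages table.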
    warning("DB: Creating dummy package for: " + packagename)
    q = self.cursor()
    q.execute("INSERT INTO Packages " +
      "(Name, Description, LocationID, DummyPkg) " +
      "VALUES ('" +
      MySQLdb.escape_string(packagename) + "', '" +
      MySQLdb.escape_string("A dummy package") + "', 1, 1)")
    return self.lookup(packagename)
  def insertNewInfo(self, package, id, locationId):
    q = self.cursor()

    # first delete the old; this is never bad
    self.clearOldInfo(id)

    warning("DB: Inserting new package info for " + package.name +
            " with id " + str(id))

    # PackageSources
    for source in package.sources:
      q.execute("INSERT INTO PackageSources (PackageID, Source) " + 
        "VALUES (" + str(id) + ", '" + MySQLdb.escape_string(source) + "')")
    # PackageDepends
    for dep in package.depends:
      depid = self.lookupOrDummy(dep)
      q.execute("INSERT INTO PackageDepends (PackageID, DepPkgID) " +
        "VALUES (" + str(id) + ", " + str(depid) + ")")
  def isdummy(self, packagename):
    warning("DB: Looking up package: " + packagename)
    q = self.cursor()
    q.execute("SELECT * FROM Packages WHERE Name = '" +
      MySQLdb.escape_string(packagename) + "' AND DummyPkg = 1")
    if (q.rowcount != 0):
      return True
    return False

############################################################
# Functions for walking the file trees
############################################################

def filesForRegexp(topdir, regexp):
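  # Walk topdir and collect the paths of all files whose basename matches regexp.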
  retval = []
  def matchfile(regexp, dirpath, namelist):
    for name in namelist:
      if (regexp.match(name)):
        retval.append(os.path.join(dirpath, name))
  os.path.walk(topdir, matchfile, regexp)
  return retval

def packagesInTree(topdir):
  return filesForRegexp(topdir, re.compile("^.*\.pkg\.tar\.gz$"))

def pkgbuildsInTree(topdir):
  return filesForRegexp(topdir, re.compile("^PKGBUILD$"))

############################################################
# Function for testing if two files are identical 
############################################################

def areFilesIdentical(file_a, file_b):
  command = "cmp '" + file_a + "' '" + file_b + "' >/dev/null"
  retval = os.system(command)
  if (retval == 0):
    return True
  return False

############################################################
# Function for fetching info from PKGBUILDs and packages
############################################################

def infoFromPackageFile(filename):
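  # Parse "name-ver-rel.pkg.tar.gz" out of the package filename.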
  pkg = os.path.basename(filename)
  m = re.compile(r"(?P<pkgname>.*)-(?P<pkgver>.*)-(?P<pkgrel>.*)\.pkg\.tar\.gz").search(pkg)
  if not m:
    raise Exception("Non-standard filename: " + pkg)
  return m.group('pkgname'), m.group('pkgver') + "-" + m.group('pkgrel')

def infoFromPkgbuildFile(filename):
  # first grab the category based on the file path
  pkgdirectory = os.path.dirname(filename)
  catdirectory = os.path.dirname(pkgdirectory)
  m = re.match(r".*/([^/]+)$", catdirectory)
  if (m):
    category = m.group(1)
  else:
    category = "none"

  # open and source the file
  pf_stdin, pf_stdout = os.popen2("/bin/bash", 't', 0)
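  # Sourcing the PKGBUILD executes any top-level shell code it contains
  # inside this bash subprocess.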
  print >>pf_stdin, ". " + filename
  #print "PKGBUILD: " + filename

  # get pkgname
  print >>pf_stdin, 'echo $pkgname'
  pkgname = pf_stdout.readline().strip()
  #print "PKGBUILD: pkgname: " + pkgname

  # get pkgver
  print >>pf_stdin, 'echo $pkgver'
  pkgver = pf_stdout.readline().strip()
  #print "PKGBUILD: pkgver: " + pkgver

  # get pkgrel
  print >>pf_stdin, 'echo $pkgrel'
  pkgrel = pf_stdout.readline().strip()
  #print "PKGBUILD: pkgrel: " + pkgrel

  # get url
  print >>pf_stdin, 'echo $url'
  url = pf_stdout.readline().strip()
  #print "PKGBUILD: url: " + url

  # get desc
  print >>pf_stdin, 'echo $pkgdesc'
  pkgdesc = pf_stdout.readline().strip()
  #print "PKGBUILD: pkgdesc: " + pkgdesc

  # get source array
  print >>pf_stdin, 'echo ${source[*]}'
  source = (pf_stdout.readline().strip()).split(" ")

  # get depends array
  print >>pf_stdin, 'echo ${depends[*]}'
  depends = (pf_stdout.readline().strip()).split(" ")

  # clean up
  pf_stdin.close()
  pf_stdout.close()

  return pkgname, pkgver + "-" + pkgrel, pkgdesc, url, depends, source, category

def infoFromPkgbuildFileWorse(filename):
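  # Alternative metadata parser built on the pacman module; the main flow
  # below uses infoFromPkgbuildFile instead.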
  # load the file with pacman library
  pkg = pacman.load(filename)
  return (pkg.name, pkg.version + "-" + pkg.release, pkg.desc,
    pkg.url, pkg.depends, pkg.source)
  
############################################################
# Functions for doing the final steps of execution
############################################################

def execute(command):
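  # Print the command, and skip execution when the -n (dry-run) switch is set.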
  global switches
  print(command)
  if not (switches.get("-n") == True):
    return os.system(command)
  return 0

def copyFileToRepo(filename, repodir):
  destfile = os.path.join(repodir, os.path.basename(filename))
  command = "cp --preserve=timestamps '" + filename + "' '" + destfile + "'"
  return execute(command)

def deleteFile(filename):
  command = "rm '" + filename + "'"
  return execute(command)

def runGensync(repo, pkgbuild):
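  # Note: the gensync invocation in the main flow is commented out below;
  # updatesync is used to maintain the repo db instead.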
#target = os.path.join(repo, os.path.basename(repo) + ".db.tar.gz")
  target = os.path.join(repo, "community.db.tar.gz")
  command = "gensync '" + pkgbuild + "' '" + target + "'"
  return execute(command)

def runUpdatesyncUpd(repo, pkgbuild):
  global havefakeroot
  targetDB = os.path.join(repo, "community.db.tar.gz")
  if havefakeroot:
    command = "fakeroot updatesync upd '" + targetDB + "' '" + pkgbuild + "' '" + repo + "'"
  else:
    command = "updatesync upd '" + targetDB + "' '" + pkgbuild + "' '" + repo + "'"
  return execute(command)

def runUpdatesyncDel(repo, pkgname):
  global havefakeroot
  targetDB = os.path.join(repo, "community.db.tar.gz")
  if havefakeroot:
    command = "fakeroot updatesync del '" + targetDB + "' '" + pkgname + "'"
  else:
    command = "updatesync del '" + targetDB + "' '" + pkgname + "'"
  return execute(command)

############################################################
# Functions for error handling
############################################################

def warning(string):
  print >>sys.stderr, string

had_error = 0
def error(string):
  global had_error
  warning(string)
  had_error = 1

############################################################
# MAIN
############################################################

# ARGUMENTS
#
# tupkgupdate [-n] [--delete] [--paranoid] <repo_dir> <pkgbuild_dir> <build_dir>

# First call getopt
switch_list,args_proper = getopt.getopt(sys.argv[1:], 'n',
                          [ "delete", "paranoid" ])
switches = {}
for switch in switch_list:
  switches[switch[0]] = 1

# Then handle the remaining arguments
if (len(args_proper) < 3):
  print >>sys.stderr, "syntax: tupkgupdate [-n] [--delete] [--paranoid] <repo_dir> <pkgbuild_tree> <build_tree>"
  sys.exit(-1)

# Make sure we can use fakeroot, warn if not
havefakeroot = False
if os.access('/usr/bin/fakeroot', os.X_OK):
  havefakeroot = True
else:
  warning("Not using fakeroot for repo db generation")

repo_dir, pkgbuild_dir, build_dir = args_proper

# Open the database so we find out now if we can't!
db = PackageDatabase(config.get('mysql', 'host'),
  config.get('mysql', 'username'),
  config.get('mysql', 'password'),
  config.get('mysql', 'db'))

# Set up the lists and tables
packages = dict()
copy = list()
delete = list()

dbremove = list()
dbmodify = list()
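
# packages maps each package name to its Package record; "copy" and "delete"
# collect files to copy into or remove from the repo dir; "dbremove" and
# "dbmodify" collect packages whose database rows need removing or updating.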

# PASS 1: PARSING/LOCATING
#  
# A) Go through the PKGBUILD tree
#    For each PKGBUILD, create a Package with new Version containing
#    parsed version and None for file

a_files = pkgbuildsInTree(pkgbuild_dir)
for a_file in a_files:
  pkgname, ver, desc, url, depends, sources, category = infoFromPkgbuildFile(a_file)

  # Error (and skip) if the PKGBUILD did not define pkgname or a version
  # (sourcing an incomplete file yields empty strings, not None)
  if (not pkgname or ver == "-"):
    error("Pkgbuild '" + a_file + "' is invalid!")
    continue

  # Error (and skip) if we encounter any duplicate package names
  # in the PKGBUILDs
  if (packages.get(pkgname)):
    error("Pkgbuild '" + a_file + "' is a duplicate!")
    continue
  
  version = Version()
  version.version = ver
  version.file = None

  package = Package()
  package.name = pkgname
  package.category = category
  package.desc = desc
  package.url = url
  package.depends = depends
  package.sources = sources
  package.new = version

#  print "Package: desc " + desc

  packages[pkgname] = package

# B) Go through the old repo dir
#    For each package file we encounter, create a Package with old
#    Version containing parsed version and filepath

b_files = packagesInTree(repo_dir)
for b_file in b_files:
  pkgname, ver = infoFromPackageFile(b_file)
  
  version = Version()
  version.version = ver
  version.file = b_file

  package = packages.get(pkgname)
  if (package == None):
    package = Package()
    package.name = pkgname
    packages[pkgname] = package
  package.old = version

# C) Go through the build tree
#    For each package file we encounter:
#      1 - look up the package name; if it fails, ignore the file (no error)
#      2 - if package.new == None, ignore the package (no error)
#      3 - if package.new.version doesn't match, then skip (no error)
#      4 - if package.new.file == None, point it to this file
#          otherwise, log an error (and skip)

c_files = packagesInTree(build_dir)
for c_file in c_files:
  pkgname, ver = infoFromPackageFile(c_file)

  # 1
  package = packages.get(pkgname)
  if (package == None):
    continue
  
  # 2
  if (package.new == None):
    continue

  # 3
  if (package.new.version != ver):
    continue

  # 4
  if (package.new.file == None):
    package.new.file = c_file
    continue
  else:
    error("Duplicate new file '" + c_file + "'")
    continue

# PASS 2: CHECKING
#
# Go through the package collection
#   1 - if package has no new, place its old file on the "delete" list (and package on "dbremove")
#   2 - if package has a new but no new.file, and old file doesn't
#       have the same version, then error (because gensync won't rebuild)
#   3 - if package has no old, add new file to "copy" list into repo dir (and package on "dbmodify")
#   4 - if new == old and paranoid is set, compare the files and error if not the same;
#       otherwise just skip (no update)
#   5 - if we got here, it's a legit nontrivial new version which we allow
#       add entry to "delete" list for old file and "copy" list for
#       new file into repo dir (and package to "dbmodify")

for package in packages.values():
  # 1
  if (package.new == None):
    delete.append(package.old.file)
    dbremove.append(package)
    continue

  # 2
  if (package.new.file == None):
    if (package.old == None or package.old.file == None or 
      package.old.version != package.new.version):
      errstr = "No new package supplied for " + package.name + " " + package.new.version + "!"
      error(errstr)
      continue

  # 3 
  if (package.old == None):
    copy.append(package.new.file)
    dbmodify.append(package)
    continue

  # 4
  if (package.old.version == package.new.version):
    if (switches.get("--paranoid") == True and package.new.file != None):
      if not (areFilesIdentical(package.old.file, package.new.file)):
        warning("New package file with identical version '" +
          package.new.file + "' is different from the old one:")
        if (switches.get("--delete") == True):
          warning("  Deleting the new file.")
          delete.append(package.new.file)
        else:
          warning("  Ignoring the new file.")
    continue

  # 5
  delete.append(package.old.file)
  copy.append(package.new.file)
  dbmodify.append(package)
  continue  

## IF WE HAVE HAD ANY ERRORS AT THIS POINT, ABORT! ##
if (had_error == 1):
  error("Aborting due to errors.")
  sys.exit(-1)

# PASS 3: EXECUTION
#

# First, do all the database updates
for package in dbremove:
  id = db.lookup(package.name)
  # Note: this could remove a package from unsupported; probably want to restrict to locationId and/or non-dummy
  if (id != None):
    db.clearOldInfo(id)
    db.remove(id, 3)

for package in dbmodify:
  warning("DB: Package in dbmodify: " + package.name)
  id = db.lookup(package.name)
  if (id == None):
    db.insert(package, 3)
  else:
    db.update(id, package, 3)

# Copy
for file in copy:
  retval = copyFileToRepo(file, repo_dir)
  if (retval != 0):
    error("Could not copy file to repo: '" + file + "'")
    sys.exit(-1)
# Delete (second, for safety's sake)
for file in delete:
  deleteFile(file)
# Now that the new files have been copied and the old ones deleted, remove
# the originals from the build tree if --delete was given
if (switches.get("--delete") == True):
  for file in copy:
    deleteFile(file)
# Run gensync to build the repo index
#if (len(copy) + len(delete) > 0):
#  retval = runGensync(repo_dir, pkgbuild_dir)
#  if (retval != 0):
#    error("Gensync returned an error!")
#    sys.exit(-1)

# Run updatesync where it is needed
for package in dbremove:
  retval = runUpdatesyncDel(repo_dir, package.name)
  if (retval != 0):
    error("Updatesync del returned an error!")
    sys.exit(-1)
for package in dbmodify:
  retval = runUpdatesyncUpd(repo_dir, os.path.join(pkgbuild_dir, package.category, package.name, "PKGBUILD"))
  if (retval != 0):
    error("Updatesync upd returned an error!")
    sys.exit(-1)

# vim: ft=python ts=2 sw=2 et