cygwin.com/git/cygwin-apps/calm.git
 TODO           |  12
 calm           | 137
 maintainers.py |  15
 mksetupini     | 263
 package.py     | 317
 upload-scan    | 161
 uploads.py     | 177
 7 files changed, 663 insertions(+), 419 deletions(-)
diff --git a/TODO b/TODO
deleted file mode 100644
index c977ae9..0000000
--- a/TODO
+++ /dev/null
@@ -1,12 +0,0 @@
-* The current update done by upset is atomic: It reads both uploads and release
- area and verifies the resulting package set is consistent before moving
- anything.
-
- For simplicity, this reimplementation splits the moving and package set
- consistency check into separate pieces, but we should have the pieces to add
- that feature back.
-
-* upset appears to have a feature intended to merge the existing requires:
- with those in setup.hint. I'm not sure if it works. Something like that
- would be quite useful, telling you when dependencies were added or removed.
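
The dependency-diff feature described in the TODO above could be a small helper along these lines (a hypothetical sketch, not code from this commit): compare the requires: value already in the release area with the one from an uploaded setup.hint and report what changed.

    # hypothetical helper for the requires-merge idea in the TODO: report
    # which dependencies were added or removed between two sets of hints
    def requires_diff(old_hints, new_hints):
        old = set(old_hints.get('requires', '').split())
        new = set(new_hints.get('requires', '').split())
        return sorted(new - old), sorted(old - new)

    added, removed = requires_diff({'requires': 'cygwin libX11_6'},
                                   {'requires': 'cygwin libX11_6 libfoo1'})
    assert added == ['libfoo1'] and removed == []
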
diff --git a/calm b/calm
new file mode 100755
index 0000000..b4e5605
--- /dev/null
+++ b/calm
@@ -0,0 +1,137 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015 Jon Turney
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+
+#
+# calm - better than being upset
+#
+
+#
+# read packages from release area
+# for each maintainer
+# - read and validate any package uploads
+# - build a list of files to move and remove
+# - merge package sets
+# - validate merged package set
+# - process remove list
+# - on failure
+# -- mail maintainer with errors
+# -- empty move list
+# -- discard merged package set
+# - on success
+# -- process move list
+# -- mail maintainer with movelist
+# -- continue with merged package set
+# write setup.ini file
+#
+
+import argparse
+import logging
+import os
+import sys
+
+import common_constants
+import maintainers
+import package
+import uploads
+
+
+#
+#
+#
+
+def main(args):
+ # build package list
+ packages = package.read_packages(args.rel_area, args.arch)
+
+ # validate the package set
+ if not package.validate_packages(args, packages):
+ logging.error("existing package set has errors, not processing uploads or writing setup.ini")
+ return
+
+ # read maintainer list
+ mlist = maintainers.Maintainer.read(args)
+
+ # make the list of all packages
+ all_packages = maintainers.Maintainer.all_packages(mlist)
+
+ # for each maintainer
+ for name in sorted(mlist.keys()):
+ m = mlist[name]
+
+ # XXX: wrap this in a 'mail logs' container
+ # XXX: but only want to send one mail per run to leads
+
+ (error, mpackages, move, remove_always, remove_success) = uploads.scan(m, all_packages, args)
+
+ uploads.remove(args, remove_always)
+
+ if not error:
+ merged_packages = package.merge(packages, mpackages)
+
+ # validate the package set
+ if package.validate_packages(args, merged_packages):
+ # process the move list
+ uploads.move(m, args, move)
+ uploads.remove(args, remove_success)
+ # use merged package list
+ packages = merged_packages
+ else:
+ # otherwise we discard move list and merged_packages
+ logging.error("error while merging uploads for %s" % (name))
+
+ # write setup.ini
+ package.write_setup_ini(args, packages)
+
+#
+#
+#
+
+if __name__ == "__main__":
+ homedir_default = common_constants.HOMEDIR
+ orphanmaint_default = common_constants.ORPHANMAINT
+ pkglist_default = common_constants.PKGMAINT
+ relarea_default = common_constants.FTP
+
+ parser = argparse.ArgumentParser(description='Upset replacement')
+ parser.add_argument('--arch', action='store', required=True, choices=common_constants.ARCHES)
+ parser.add_argument('--email', action='store', dest='email', nargs='?', const=common_constants.EMAILS, help='email output to maintainer and ADDRS (default: ' + common_constants.EMAILS + ')', metavar='ADDRS')
+ parser.add_argument('--homedir', action='store', metavar='DIR', help="maintainer home directory (default: " + homedir_default + ")", default=homedir_default)
+ parser.add_argument('--inifile', '-u', action='store', help='output filename', required=True)
+ parser.add_argument('--orphanmaint', action='store', metavar='NAMES', help="orphan package maintainers (default: '" + orphanmaint_default + "')", default=orphanmaint_default)
+ parser.add_argument('--pkglist', action='store', metavar='FILE', help="package maintainer list (default: " + pkglist_default + ")", default=pkglist_default)
+ parser.add_argument('--release', action='store', help='value for setup-release key (default: cygwin)', default='cygwin')
+ parser.add_argument('--releasearea', action='store', metavar='DIR', help="release directory (default: " + relarea_default + ")", default=relarea_default, dest='rel_area')
+ parser.add_argument('--setup-version', action='store', metavar='VERSION', help='value for setup-version key')
+ parser.add_argument('-n', '--dry-run', action='store_true', dest='dryrun', help="don't do anything")
+ parser.add_argument('-v', '--verbose', action='count', dest='verbose', help='verbose output')
+ args = parser.parse_args()
+
+ if args.verbose:
+ logging.getLogger().setLevel(logging.INFO)
+
+ logging.basicConfig(format=os.path.basename(sys.argv[0])+': %(message)s')
+
+ if args.email:
+ args.email = args.email.split(',')
+
+ main(args)
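
For experimentation, main() can also be driven without the command line by building the argument namespace by hand. Every path below is an illustrative placeholder, not one of calm's real defaults; dryrun=True means uploads are neither moved nor removed, though setup.ini is still written to inifile.

    # a sketch of a dry run; all values here are placeholder assumptions
    import argparse

    args = argparse.Namespace(
        arch='x86_64',
        rel_area='/var/ftp/pub/cygwin',      # --releasearea
        homedir='/home/cygwin-staging',      # --homedir
        pkglist='cygwin-pkg-maint',          # --pkglist
        orphanmaint='',                      # --orphanmaint
        inifile='/tmp/setup.ini',            # --inifile
        release='cygwin',
        setup_version=None,
        dryrun=True,                         # log actions without doing them
        email=None,
        verbose=1,
    )
    main(args)
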
diff --git a/maintainers.py b/maintainers.py
index 061a9e4..81fc272 100644
--- a/maintainers.py
+++ b/maintainers.py
@@ -34,6 +34,7 @@
# it, and want to allow the maintainer to change it)
#
+import itertools
import logging
import os
import re
@@ -116,3 +117,17 @@ class Maintainer(object):
logging.error("unrecognized line in %s:%d: '%s'" % (pkglist, i, l))
return mlist
+
+ # create maintainer list
+ @staticmethod
+ def read(args):
+ mlist = {}
+ mlist = Maintainer.add_directories(mlist, args.homedir)
+ mlist = Maintainer.add_packages(mlist, args.pkglist, args.orphanmaint)
+
+ return mlist
+
+ # a list of all packages
+ @staticmethod
+ def all_packages(mlist):
+ return list(itertools.chain.from_iterable(mlist[m].pkgs for m in mlist))
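
all_packages() flattens the per-maintainer package lists into a single list. With toy data standing in for Maintainer objects, itertools.chain.from_iterable behaves like this:

    import itertools

    # toy stand-in: maintainer name -> list of maintained package names
    pkgs = {'alice': ['pkg-a', 'pkg-b'], 'bob': ['pkg-c']}
    flat = list(itertools.chain.from_iterable(pkgs[m] for m in pkgs))
    assert sorted(flat) == ['pkg-a', 'pkg-b', 'pkg-c']
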
diff --git a/mksetupini b/mksetupini
index 5fa8406..c4627d0 100755
--- a/mksetupini
+++ b/mksetupini
@@ -24,292 +24,37 @@
#
# mksetupini
#
-# Make a setup.ini file from a collection of tarfile and setup.hints
+# Make a setup.ini file from a collection of tarfiles and setup.hints
#
import argparse
-import textwrap
-import time
-import re
import logging
import os
import sys
-from collections import defaultdict
import common_constants
import package
-from version import SetupVersion
#
#
#
-
def main(args):
# build package list
packages = package.read_packages(args.rel_area, args.arch)
# validate the package set
- if not validate_packages(args, packages):
+ if not package.validate_packages(args, packages):
+ logging.error("package set has errors, not writing setup.ini")
return
# write setup.ini
- write_setup_ini(args, packages)
+ package.write_setup_ini(args, packages)
#
-# validate the package database
-#
-def validate_packages(args, packages):
- error = False
-
- for p in sorted(packages.keys()):
- # all packages listed in requires must exist
- if 'requires' in packages[p].hints:
- for r in packages[p].hints['requires'].split():
- if r not in packages:
- logging.error("package '%s' requires nonexistent package '%s'" % (p, r))
- error = True
-
- # a package should not appear in its own requires
- if r == p:
- logging.error("package '%s' requires itself" % (p))
-
- # if external-source is used, the package must exist
- if 'external-source' in packages[p].hints:
- e = packages[p].hints['external-source']
- if e not in packages:
- logging.error("package '%s' refers to nonexistent external-source '%s'" % (p, e))
- error = True
-
- packages[p].vermap = defaultdict(defaultdict)
- has_install = False
- is_empty = {}
-
- for t in packages[p].tars:
- # categorize each tarfile as either 'source' or 'install'
- if re.search(r'-src\.tar', t):
- category = 'source'
- else:
- category = 'install'
- has_install = True
-
- # check if install package is empty
- is_empty[t] = package.tarfile_is_empty(os.path.join(args.rel_area, args.arch, packages[p].path, t))
-
- # extract just the version part from tar filename
- v = re.sub(r'^' + re.escape(p) + '-', '', t)
- v = re.sub(r'(-src|)\.tar\.(xz|bz2|gz)$', '', v)
-
- # store tarfile corresponding to this version and category
- packages[p].vermap[v][category] = t
-
- # verify the versions specified for stability level exist
- levels = ['test', 'curr', 'prev']
- for l in levels:
- if l in packages[p].hints:
- # check that version exists
- v = packages[p].hints[l]
- if v not in packages[p].vermap:
- logging.error("package '%s' stability '%s' selects non-existent version '%s'" % (p, l, v))
- error = True
-
- # assign a version to each stability level
- packages[p].stability = defaultdict()
-
- # sort in order from highest to lowest version
- for v in sorted(packages[p].vermap.keys(), key=lambda v: SetupVersion(v), reverse=True):
- level_found = False
-
- while True:
- # no stability levels left
- if len(levels) == 0:
- # XXX: versions which don't correspond to any stability level
- # should be reported, we might want to remove them at some point
- logging.info("package '%s' has no stability levels left for version '%s'" % (p, v))
- break
-
- l = levels[0]
-
- # if current stability level has an override
- if l in packages[p].hints:
- # if we haven't reached that version yet
- if v != packages[p].hints[l]:
- break
- else:
- logging.info("package '%s' stability '%s' override to version '%s'" % (p, l, v))
- else:
- # level 'test' must be assigned by override
- if l == 'test':
- levels.remove(l)
- # go around again to check for override at the new level
- continue
-
- level_found = True
- logging.debug("package '%s' stability '%s' assigned version '%s'" % (p, l, v))
- break
-
- if not level_found:
- continue
-
- # assign version to level
- packages[p].stability[l] = v
- # and remove from list of unallocated levels
- levels.remove(l)
-
- # lastly, fill in any levels which we skipped over because a higher
- # stability level was overridden to a lower version
- for l in levels:
- if l in packages[p].hints:
- packages[p].stability[l] = packages[p].hints[l]
-
- # verify that versions have files
- for v in sorted(packages[p].vermap.keys(), key=lambda v: SetupVersion(v), reverse=True):
- required_categories = []
-
- # a source tarfile must exist for every version, unless
- # - the install tarfile is empty, or
- # - this package is external-source
- if 'external-source' not in packages[p].hints:
- if 'install' in packages[p].vermap[v]:
- if not is_empty[packages[p].vermap[v]['install']]:
- required_categories.append('source')
-
- # XXX: actually we should verify that a source tarfile must exist
- # for every install tarfile version, but it may be either in this
- # package or in the external-source package...
-
- # similarly, we should verify that each version has an install
- # tarfile, unless this is a source-only package. Unfortunately, the
- # current data model doesn't clearly identify those. For the
- # moment, if we have seen at least one install tarfile, assume we
- # aren't a source-only package.
- if has_install:
- required_categories.append('install')
-
- for c in required_categories:
- if c not in packages[p].vermap[v]:
- # logging.error("package '%s' version '%s' is missing %s tarfile" % (p, v, c))
- # error = True
- pass
-
- # for historical reasons, add cygwin to requires if it is not already
- # present, the package is not source-only, not empty or consisting only
- # of symlinks, and not on the exception list
- # (this approximates what 'autodep' did)
- if has_install and (not all(is_empty.values())) and (p not in ['base-cygwin', 'gcc4-core', 'gcc4-g++']):
- requires = packages[p].hints.get('requires', '')
-
- if not re.search(r'\bcygwin\b', requires):
- if len(requires) > 0:
- requires = requires + ' '
- packages[p].hints['requires'] = requires + 'cygwin'
-
- # if the package has no install tarfiles (i.e. is source only), mark it
- # as 'skip' (which really means 'source-only' at the moment)
- if not has_install and 'skip' not in packages[p].hints:
- packages[p].hints['skip'] = ''
-
- return not error
-
-
#
-# write setup.ini
#
-def write_setup_ini(args, packages):
-
- with open(args.inifile, 'w') as f:
- # write setup.ini header
- print(textwrap.dedent('''\
- # This file is automatically generated. If you edit it, your
- # edits will be discarded next time the file is generated.
- # See http://cygwin.com/setup.html for details.
- #'''), file=f)
-
- if args.release:
- print("release: %s" % args.release, file=f)
- print("arch: %s" % args.arch, file=f)
- print("setup-timestamp: %d" % time.time(), file=f)
- if args.setup_version:
- print("setup-version: %s" % args.setup_version, file=f)
-
- # for each package
- for p in sorted(packages.keys(), key=package.sort_key):
- # do nothing if 'skip'
- if 'skip' in packages[p].hints:
- continue
-
- # write package data
- print("\n@ %s" % p, file=f)
-
- # for historical reasons, we adjust sdesc slightly:
- #
- # - strip anything up to and including first ':'
- # - capitalize first letter
- # whilst preserving any leading quote
- #
- # these are both bad ideas, due to sdesc's which start with a
- # lower-case command name, or contain perl or ruby module names like
- # 'Net::HTTP'
- sdesc = packages[p].hints['sdesc']
- sdesc = re.sub('^("?)(.*?)("?)$', r'\2', sdesc)
- if ':' in sdesc:
- sdesc = re.sub(r'^[^:]+:\s*', '', sdesc)
- sdesc = '"' + upper_first_character(sdesc) + '"'
- print("sdesc: %s" % sdesc, file=f)
-
- if 'ldesc' in packages[p].hints:
- print("ldesc: %s" % packages[p].hints['ldesc'], file=f)
-
- # for historical reasons, category names must start with a capital
- # letter
- category = ' '.join(map(upper_first_character, packages[p].hints['category'].split()))
- print("category: %s" % category, file=f)
-
- if 'requires' in packages[p].hints:
- # for historical reasons, empty requires are suppressed
- requires = packages[p].hints['requires']
- if requires:
- print("requires: %s" % requires, file=f)
-
- # write tarfile lines for each stability level
- for level in ['curr', 'prev', 'test']:
- if level in packages[p].stability:
- version = packages[p].stability[level]
- if level != 'curr':
- print("[%s]" % level, file=f)
- print("version: %s" % version, file=f)
-
- if 'install' in packages[p].vermap[version]:
- t = packages[p].vermap[version]['install']
- tar_line('install', args.arch, packages[p], t, f)
-
- # look for corresponding source in this package first
- if 'source' in packages[p].vermap[version]:
- t = packages[p].vermap[version]['source']
- tar_line('source', args.arch, packages[p], t, f)
- # if that doesn't exist, follow external-source
- elif 'external-source' in packages[p].hints:
- s = packages[p].hints['external-source']
- t = packages[s].vermap[version]['source']
- tar_line('source', args.arch, packages[s], t, f)
-
- if 'message' in packages[p].hints:
- print("message: %s" % packages[p].hints['message'], file=f)
-
-
-def tar_line(category, arch, p, t, f):
- fn = os.path.join(arch, p.path, t)
- sha512 = p.tars[t]['sha512']
- size = p.tars[t]['size']
- print("%s: %s %d %s" % (category, fn, size, sha512), file=f)
-
-
-# change the first character of a string to upper case, without altering the
-# rest
-def upper_first_character(s):
- return s[:1].upper() + s[1:]
-
if __name__ == "__main__":
relarea_default = common_constants.FTP
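
The stability-level assignment that moves out of mksetupini here (deleted above, re-added in package.py below) is the subtlest part of validation: versions are walked from highest to lowest, 'test' is only ever assigned by an explicit override, and levels skipped because of an override are filled in afterwards. The following standalone restatement illustrates the walk; it compares versions as plain strings for brevity, whereas calm itself sorts with SetupVersion.

    def assign_stability(versions, hints):
        # minimal sketch of the level-assignment walk in validate_packages
        levels = ['test', 'curr', 'prev']
        stability = {}
        for v in sorted(versions, reverse=True):
            while levels:
                l = levels[0]
                if l in hints:
                    if v != hints[l]:
                        break             # not at the overridden version yet
                elif l == 'test':
                    levels.remove(l)      # 'test' needs an explicit override
                    continue
                stability[l] = v
                levels.remove(l)
                break
        # fill in levels skipped because a higher one was overridden lower
        for l in levels:
            if l in hints:
                stability[l] = hints[l]
        return stability

    assert assign_stability(['1.2', '1.1', '1.0'], {}) == \
        {'curr': '1.2', 'prev': '1.1'}
    assert assign_stability(['1.2', '1.1', '1.0'], {'curr': '1.1'}) == \
        {'curr': '1.1', 'prev': '1.0'}
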
diff --git a/package.py b/package.py
index b862d39..4167b17 100755
--- a/package.py
+++ b/package.py
@@ -25,14 +25,21 @@
# utilities for working with a package database
#
+from collections import defaultdict
+import copy
+import difflib
+import logging
import os
+import pprint
import re
-import logging
import tarfile
-from collections import defaultdict
+import textwrap
+import time
import hint
import common_constants
+from version import SetupVersion
class Package(object):
@@ -42,6 +49,9 @@ class Package(object):
self.hints = {}
+#
+# read packages from a directory hierarchy
+#
def read_packages(rel_area, arch):
packages = defaultdict(Package)
@@ -56,6 +66,9 @@ def read_packages(rel_area, arch):
return packages
+#
+# read a single package
+#
def read_package(packages, basedir, dirpath, files, strict=False):
relpath = os.path.relpath(dirpath, basedir)
warnings = False
@@ -178,6 +191,306 @@ def sort_key(k):
k = chr(255) + k
return k
+
+#
+# validate the package database
+#
+def validate_packages(args, packages):
+ error = False
+
+ for p in sorted(packages.keys()):
+ # all packages listed in requires must exist
+ if 'requires' in packages[p].hints:
+ for r in packages[p].hints['requires'].split():
+ if r not in packages:
+ logging.error("package '%s' requires nonexistent package '%s'" % (p, r))
+ error = True
+
+ # a package should not appear in its own requires
+ if r == p:
+ logging.error("package '%s' requires itself" % (p))
+
+ # if external-source is used, the package must exist
+ if 'external-source' in packages[p].hints:
+ e = packages[p].hints['external-source']
+ if e not in packages:
+ logging.error("package '%s' refers to nonexistent external-source '%s'" % (p, e))
+ error = True
+
+ packages[p].vermap = defaultdict(defaultdict)
+ has_install = False
+ is_empty = {}
+
+ for t in packages[p].tars:
+ # categorize each tarfile as either 'source' or 'install'
+ if re.search(r'-src\.tar', t):
+ category = 'source'
+ else:
+ category = 'install'
+ has_install = True
+
+ # check if install package is empty
+ is_empty[t] = tarfile_is_empty(os.path.join(args.rel_area, args.arch, packages[p].path, t))
+
+ # extract just the version part from tar filename
+ v = re.sub(r'^' + re.escape(p) + '-', '', t)
+ v = re.sub(r'(-src|)\.tar\.(xz|bz2|gz)$', '', v)
+
+ # store tarfile corresponding to this version and category
+ packages[p].vermap[v][category] = t
+
+ # verify the versions specified for stability level exist
+ levels = ['test', 'curr', 'prev']
+ for l in levels:
+ if l in packages[p].hints:
+ # check that version exists
+ v = packages[p].hints[l]
+ if v not in packages[p].vermap:
+ logging.error("package '%s' stability '%s' selects non-existent version '%s'" % (p, l, v))
+ error = True
+
+ # assign a version to each stability level
+ packages[p].stability = defaultdict()
+
+ # sort in order from highest to lowest version
+ for v in sorted(packages[p].vermap.keys(), key=lambda v: SetupVersion(v), reverse=True):
+ level_found = False
+
+ while True:
+ # no stability levels left
+ if len(levels) == 0:
+ # XXX: versions which don't correspond to any stability level
+ # should be reported, we might want to remove them at some point
+ logging.info("package '%s' has no stability levels left for version '%s'" % (p, v))
+ break
+
+ l = levels[0]
+
+ # if current stability level has an override
+ if l in packages[p].hints:
+ # if we haven't reached that version yet
+ if v != packages[p].hints[l]:
+ break
+ else:
+ logging.info("package '%s' stability '%s' override to version '%s'" % (p, l, v))
+ else:
+ # level 'test' must be assigned by override
+ if l == 'test':
+ levels.remove(l)
+ # go around again to check for override at the new level
+ continue
+
+ level_found = True
+ logging.debug("package '%s' stability '%s' assigned version '%s'" % (p, l, v))
+ break
+
+ if not level_found:
+ continue
+
+ # assign version to level
+ packages[p].stability[l] = v
+ # and remove from list of unallocated levels
+ levels.remove(l)
+
+ # lastly, fill in any levels which we skipped over because a higher
+ # stability level was overridden to a lower version
+ for l in levels:
+ if l in packages[p].hints:
+ packages[p].stability[l] = packages[p].hints[l]
+
+ # verify that versions have files
+ for v in sorted(packages[p].vermap.keys(), key=lambda v: SetupVersion(v), reverse=True):
+ required_categories = []
+
+ # a source tarfile must exist for every version, unless
+ # - the install tarfile is empty, or
+ # - this package is external-source
+ if 'external-source' not in packages[p].hints:
+ if 'install' in packages[p].vermap[v]:
+ if not is_empty[packages[p].vermap[v]['install']]:
+ required_categories.append('source')
+
+ # XXX: actually we should verify that a source tarfile must exist
+ # for every install tarfile version, but it may be either in this
+ # package or in the external-source package...
+
+ # similarly, we should verify that each version has an install
+ # tarfile, unless this is a source-only package. Unfortunately, the
+ # current data model doesn't clearly identify those. For the
+ # moment, if we have seen at least one install tarfile, assume we
+ # aren't a source-only package.
+ if has_install:
+ required_categories.append('install')
+
+ for c in required_categories:
+ if c not in packages[p].vermap[v]:
+ # logging.error("package '%s' version '%s' is missing %s tarfile" % (p, v, c))
+ # error = True
+ pass
+
+ # for historical reasons, add cygwin to requires if it is not already
+ # present, the package is not source-only, not empty or consisting only
+ # of symlinks, and not on the exception list
+ # (this approximates what 'autodep' did)
+ if has_install and (not all(is_empty.values())) and (p not in ['base-cygwin', 'gcc4-core', 'gcc4-g++']):
+ requires = packages[p].hints.get('requires', '')
+
+ if not re.search(r'\bcygwin\b', requires):
+ if len(requires) > 0:
+ requires = requires + ' '
+ packages[p].hints['requires'] = requires + 'cygwin'
+
+ # if the package has no install tarfiles (i.e. is source only), mark it
+ # as 'skip' (which really means 'source-only' at the moment)
+ if not has_install and 'skip' not in packages[p].hints:
+ packages[p].hints['skip'] = ''
+
+ return not error
+
+
+#
+# write setup.ini
+#
+def write_setup_ini(args, packages):
+
+ with open(args.inifile, 'w') as f:
+ # write setup.ini header
+ print(textwrap.dedent('''\
+ # This file is automatically generated. If you edit it, your
+ # edits will be discarded next time the file is generated.
+ # See http://cygwin.com/setup.html for details.
+ #'''), file=f)
+
+ if args.release:
+ print("release: %s" % args.release, file=f)
+ print("arch: %s" % args.arch, file=f)
+ print("setup-timestamp: %d" % time.time(), file=f)
+ if args.setup_version:
+ print("setup-version: %s" % args.setup_version, file=f)
+
+ # for each package
+ for p in sorted(packages.keys(), key=sort_key):
+ # do nothing if 'skip'
+ if 'skip' in packages[p].hints:
+ continue
+
+ # write package data
+ print("\n@ %s" % p, file=f)
+
+ # for historical reasons, we adjust sdesc slightly:
+ #
+ # - strip anything up to and including first ':'
+ # - capitalize first letter
+ # whilst preserving any leading quote
+ #
+ # these are both bad ideas, due to sdesc's which start with a
+ # lower-case command name, or contain perl or ruby module names like
+ # 'Net::HTTP'
+ sdesc = packages[p].hints['sdesc']
+ sdesc = re.sub('^("?)(.*?)("?)$', r'\2', sdesc)
+ if ':' in sdesc:
+ sdesc = re.sub(r'^[^:]+:\s*', '', sdesc)
+ sdesc = '"' + upper_first_character(sdesc) + '"'
+ print("sdesc: %s" % sdesc, file=f)
+
+ if 'ldesc' in packages[p].hints:
+ print("ldesc: %s" % packages[p].hints['ldesc'], file=f)
+
+ # for historical reasons, category names must start with a capital
+ # letter
+ category = ' '.join(map(upper_first_character, packages[p].hints['category'].split()))
+ print("category: %s" % category, file=f)
+
+ if 'requires' in packages[p].hints:
+ # for historical reasons, empty requires are suppressed
+ requires = packages[p].hints['requires']
+ if requires:
+ print("requires: %s" % requires, file=f)
+
+ # write tarfile lines for each stability level
+ for level in ['curr', 'prev', 'test']:
+ if level in packages[p].stability:
+ version = packages[p].stability[level]
+ if level != 'curr':
+ print("[%s]" % level, file=f)
+ print("version: %s" % version, file=f)
+
+ if 'install' in packages[p].vermap[version]:
+ t = packages[p].vermap[version]['install']
+ tar_line('install', args.arch, packages[p], t, f)
+
+ # look for corresponding source in this package first
+ if 'source' in packages[p].vermap[version]:
+ t = packages[p].vermap[version]['source']
+ tar_line('source', args.arch, packages[p], t, f)
+ # if that doesn't exist, follow external-source
+ elif 'external-source' in packages[p].hints:
+ s = packages[p].hints['external-source']
+ t = packages[s].vermap[version]['source']
+ tar_line('source', args.arch, packages[s], t, f)
+
+ if 'message' in packages[p].hints:
+ print("message: %s" % packages[p].hints['message'], file=f)
+
+
+# helper function to output details for a particular tar file
+def tar_line(category, arch, p, t, f):
+ fn = os.path.join(arch, p.path, t)
+ sha512 = p.tars[t]['sha512']
+ size = p.tars[t]['size']
+ print("%s: %s %d %s" % (category, fn, size, sha512), file=f)
+
+
+# helper function to change the first character of a string to upper case,
+# without altering the rest
+def upper_first_character(s):
+ return s[:1].upper() + s[1:]
+
+
+#
+# merge two sets of packages
+#
+# for each package which exists in both a and b:
+# - they must exist at the same relative path, or the package from a is used
+# - we combine the list of tarfiles, duplicates are not expected
+# - we use the hints from b, and warn if they are different from the hints for a
+#
+def merge(a, b):
+ # start with a copy of a
+ c = copy.deepcopy(a)
+
+ for p in b:
+ # if the package is in b but not in a, add it to the copy
+ if p not in a:
+ c[p] = b[p]
+ # else, if the package is both in a and b, we have to do a merge
+ else:
+ # package must exist at same relative path
+ if a[p].path != b[p].path:
+ logging.error("package name %s at paths %s and %s" % (p, a[p].path, b[p].path))
+ else:
+ for t in b[p].tars:
+ if t in c[p].tars:
+ logging.error("package name %s duplicate tarfile %s" % (p, t))
+ else:
+ c[p].tars[t] = b[p].tars[t]
+
+ # use hints from b, but warn that they have changed
+ if a[p].hints != b[p].hints:
+ c[p].hints = b[p].hints
+
+ diff = '\n'.join(difflib.ndiff(
+ pprint.pformat(a[p].hints).splitlines(),
+ pprint.pformat(b[p].hints).splitlines()))
+
+ logging.warning("package name %s hints changed\n%s\n" % (p, diff))
+
+ return c
+
+
+#
+#
+#
if __name__ == "__main__":
for arch in common_constants.ARCHES:
packages = read_packages(common_constants.FTP, arch)
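
When merge() keeps b's hints, it warns with an ndiff of the two pprint renderings, so the log shows exactly which hint lines changed. A small illustration with toy hints (not real package data):

    import difflib
    import pprint

    a_hints = {'category': 'Utils', 'requires': 'cygwin'}
    b_hints = {'category': 'Utils', 'requires': 'cygwin libfoo1'}

    # the same rendering merge() uses for its "hints changed" warning
    diff = '\n'.join(difflib.ndiff(pprint.pformat(a_hints).splitlines(),
                                   pprint.pformat(b_hints).splitlines()))
    print(diff)
    # prints a '-' line with a's rendering, a '+' line with b's, and '?'
    # guide lines marking where the two strings differ
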
diff --git a/upload-scan b/upload-scan
index 9524429..fc71df9 100755
--- a/upload-scan
+++ b/upload-scan
@@ -33,155 +33,15 @@
# - send a report on what we did to the maintainer and project leads
#
-from collections import defaultdict
import argparse
-import filecmp
-import itertools
import logging
import os
import sys
import buffering_smtp_handler
import common_constants
-import hint
import maintainers
-import package
-
-
-#
-#
-#
-
-def scan(m, arch, all_packages, args):
- basedir = os.path.join(m.homedir(), arch)
- releasedir = os.path.join(args.rel_area, arch)
-
- packages = defaultdict(package.Package)
- move = defaultdict(list)
- readys = []
- sums = defaultdict(list)
- error = False
- mtime = 0
-
- # note mtime of !ready file
- for ready in [os.path.join(basedir, '!ready'), os.path.join(basedir, 'release', '!ready')]:
- if os.path.exists(ready):
- mtime = os.path.getmtime(ready)
- logging.info('processing files with mtime older than %d' % (mtime))
- readys.append(ready)
-
- # scan package directories
- for (dirpath, subdirs, files) in os.walk(os.path.join(basedir, 'release')):
- relpath = os.path.relpath(dirpath, basedir)
-
- # skip uninteresting directories
- if (not files) or (relpath == 'release'):
- continue
-
- logging.info('reading uploads from %s' % dirpath)
-
- # It really only makes sense for !ready to be in the basedir, or
- # basedir/release, but historically we have accepted it anywhere, where it
- # then applied to all files visited later in an unspecified traversal order.
- if '!ready' in files:
- logging.error("!ready at %s not supported, ignored" % relpath)
- files.remove('!ready')
-
- # package doesn't appear in package list at all
- pkgname = os.path.basename(dirpath)
- if pkgname not in all_packages:
- logging.error("%s is not in the package list" % pkgname)
- continue
-
- # only process packages for which we are listed as a maintainer
- if pkgname not in m.pkgs:
- logging.warn("%s is not in the package list for this maintainer" % pkgname)
- continue
-
- # ensure sha512.sum exists
- # XXX: either we make read_package able to calculate the sha512 sum when
- # sha512.sum doesn't exist, or we make sure sha512.sum exists. Not
- # sure which is the better approach.
- if 'sha512.sum' not in files:
- logging.info('generating sha512.sum')
- os.system("cd '%s' ; sha512sum * >sha512.sum" % os.path.join(dirpath))
- files.append('sha512.sum')
-
- # filter out files we don't need to consider
- for f in sorted(files):
- fn = os.path.join(dirpath, f)
- logging.info("processing %s" % fn)
-
- # ignore !packages (which we no longer use)
- # ignore !mail and !email (which we have already read)
- if f in ['!packages', '!mail', '!email']:
- files.remove(f)
- continue
-
- if f == 'sha512.sum':
- sums[relpath].append(fn)
- continue
-
- # only process files newer than !ready
- if os.path.getmtime(fn) > mtime:
- if mtime == 0:
- logging.warn("ignoring %s as there is no !ready" % f)
- else:
- logging.warn("ignoring %s as it is newer than !ready" % f)
- files.remove(f)
- continue
-
- if f.startswith('-'):
- logging.error("file deletion request %s not implemented yet" % f)
- files.remove(f)
- else:
- dest = os.path.join(releasedir, relpath, f)
- if os.path.isfile(dest):
- if filecmp.cmp(dest, fn, shallow=False):
- logging.warn("identical %s already in release area, ignoring" % f)
- else:
- logging.error("different %s already in release area" % f)
- error = True
- else:
- move[relpath].append(f)
-
- # read and validate package
- if files:
- # strict means we consider warnings as fatal for upload
- if package.read_package(packages, basedir, dirpath, files, strict=True):
- error = True
-
- # remove all the !ready files
- for f in readys:
- logging.info("rm %s", f)
- if not args.dryrun:
- os.unlink(f)
-
- # only move something if there were no errors
- if not error:
- for p in move:
- logging.info("mkdir %s" % os.path.join(releasedir, p))
- if not args.dryrun:
- os.makedirs(os.path.join(releasedir, p), exist_ok=True)
- for f in move[p]:
- logging.info("move %s to %s" % (os.path.join(basedir, p, f), os.path.join(releasedir, p, f)))
- if not args.dryrun:
- os.rename(os.path.join(basedir, p, f), os.path.join(releasedir, p, f))
-
- # Remove sha512.sum file in upload directory
- for f in sums[p]:
- logging.info("rm %s", f)
- if not args.dryrun:
- os.unlink(f)
-
- # Update sha512.sum file in target directory
- #
- # (this means that upset can use that file unconditionally,
- # rather than having to have a special case to generate the hash
- # itself for when that file hasn't yet been created by
- # sourceware.org scripts)
- if not args.dryrun:
- os.system("cd '%s' ; sha512sum * >sha512.sum" % os.path.join(releasedir, p))
+import uploads
#
@@ -190,14 +50,14 @@ def scan(m, arch, all_packages, args):
def main(args):
# create maintainer list
- mlist = {}
- mlist = maintainers.Maintainer.add_directories(mlist, args.homedir)
- mlist = maintainers.Maintainer.add_packages(mlist, args.pkglist, args.orphanmaint)
+ mlist = maintainers.Maintainer.read(args)
# make the list of all packages
- all_packages = list(itertools.chain.from_iterable(mlist[m].pkgs for m in mlist))
+ all_packages = maintainers.Maintainer.all_packages(mlist)
for arch in common_constants.ARCHES:
+ args.arch = arch
+
for name in sorted(mlist.keys()):
m = mlist[name]
@@ -210,7 +70,16 @@ def main(args):
logging_format='%(message)s')
logging.getLogger().addHandler(handler)
- scan(m, arch, all_packages, args)
+ # search for and validate uploaded packages
+ (error, packages, move, remove_always, remove_success) = uploads.scan(m, all_packages, args)
+
+ # always remove all the !ready files
+ uploads.remove(args, remove_always)
+
+ # but only move something if there were no errors
+ if not error:
+ uploads.move(m, args, move)
+ uploads.remove(args, remove_success)
if args.email:
handler.close()
diff --git a/uploads.py b/uploads.py
new file mode 100644
index 0000000..95c8e37
--- /dev/null
+++ b/uploads.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015 Jon Turney
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+
+#
+# upload directory processing
+#
+
+from collections import defaultdict
+import filecmp
+import os
+import logging
+
+import package
+
+
+#
+#
+#
+
+def scan(m, all_packages, args):
+ basedir = os.path.join(m.homedir(), args.arch)
+ releasedir = os.path.join(args.rel_area, args.arch)
+
+ packages = defaultdict(package.Package)
+ move = defaultdict(list)
+ readys = []
+ sums = []
+ error = False
+ mtime = 0
+
+ # note mtime of !ready file
+ for ready in [os.path.join(basedir, '!ready'), os.path.join(basedir, 'release', '!ready')]:
+ if os.path.exists(ready):
+ mtime = os.path.getmtime(ready)
+ logging.info('processing files with mtime older than %d' % (mtime))
+ readys.append(ready)
+
+ # scan package directories
+ for (dirpath, subdirs, files) in os.walk(os.path.join(basedir, 'release')):
+ relpath = os.path.relpath(dirpath, basedir)
+
+ # skip uninteresting directories
+ if (not files) or (relpath == 'release'):
+ continue
+
+ logging.info('reading uploads from %s' % dirpath)
+
+ # It really only makes sense for !ready to be in the basedir, or
+ # basedir/release, but historically we have accepted it anywhere, where it
+ # then applied to all files visited later in an unspecified traversal order.
+ if '!ready' in files:
+ logging.error("!ready at %s not supported, ignored" % relpath)
+ files.remove('!ready')
+
+ # package doesn't appear in package list at all
+ pkgname = os.path.basename(dirpath)
+ if pkgname not in all_packages:
+ logging.error("%s is not in the package list" % pkgname)
+ continue
+
+ # only process packages for which we are listed as a maintainer
+ if pkgname not in m.pkgs:
+ logging.warn("%s is not in the package list for this maintainer" % pkgname)
+ continue
+
+ # ensure sha512.sum exists
+ # XXX: either we make read_package able to calculate the sha512 sum when
+ # sha512.sum doesn't exist, or we make sure sha512.sum exists. Not
+ # sure which is the better approach.
+ if 'sha512.sum' not in files:
+ logging.info('generating sha512.sum')
+ os.system("cd '%s' ; sha512sum * >sha512.sum" % os.path.join(dirpath))
+ files.append('sha512.sum')
+
+ # filter out files we don't need to consider
+ for f in sorted(files):
+ fn = os.path.join(dirpath, f)
+ rel_fn = os.path.join(relpath, f)
+ logging.info("processing %s" % rel_fn)
+
+ # ignore !packages (which we no longer use)
+ # ignore !mail and !email (which we have already read)
+ if f in ['!packages', '!mail', '!email']:
+ files.remove(f)
+ continue
+
+ if f == 'sha512.sum':
+ sums.append(fn)
+ continue
+
+ # only process files newer than !ready
+ if os.path.getmtime(fn) > mtime:
+ if mtime == 0:
+ logging.warn("ignoring %s as there is no !ready" % rel_fn)
+ else:
+ logging.warn("ignoring %s as it is newer than !ready" % rel_fn)
+ files.remove(f)
+ continue
+
+ if f.startswith('-'):
+ logging.error("file deletion request %s not implemented yet" % rel_fn)
+ files.remove(f)
+ else:
+ dest = os.path.join(releasedir, relpath, f)
+ if os.path.isfile(dest):
+ if filecmp.cmp(dest, fn, shallow=False):
+ logging.warn("identical %s already in release area, ignoring" % rel_fn)
+ else:
+ logging.error("different %s already in release area" % rel_fn)
+ error = True
+ else:
+ move[relpath].append(f)
+
+ # read and validate package
+ if files:
+ # strict means we consider warnings as fatal for upload
+ if package.read_package(packages, basedir, dirpath, files, strict=True):
+ error = True
+
+ return (error, packages, move, readys, sums)
+
+
+#
+#
+#
+
+def remove(args, files):
+ for f in files:
+ logging.info("rm %s", f)
+ if not args.dryrun:
+ os.unlink(f)
+
+
+#
+#
+#
+
+def move(m, args, movelist):
+ basedir = os.path.join(m.homedir(), args.arch)
+ releasedir = os.path.join(args.rel_area, args.arch)
+
+ for p in movelist:
+ logging.info("mkdir %s" % os.path.join(releasedir, p))
+ if not args.dryrun:
+ os.makedirs(os.path.join(releasedir, p), exist_ok=True)
+ for f in movelist[p]:
+ logging.info("move %s to %s" % (os.path.join(basedir, p, f), os.path.join(releasedir, p, f)))
+ if not args.dryrun:
+ os.rename(os.path.join(basedir, p, f), os.path.join(releasedir, p, f))
+
+ # Update sha512.sum file in target directory
+ #
+ # (this means that upset can use that file unconditionally, rather than
+ # having to have a special case to generate the hash itself for when
+ # that file hasn't yet been created by sourceware.org scripts)
+ if not args.dryrun:
+ os.system("cd '%s' ; sha512sum * >sha512.sum" % os.path.join(releasedir, p))
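
One caveat with the sha512.sum regeneration: os.system hands the directory to a shell inside single quotes, so a path containing a single quote would break the command. A shell-free equivalent is sketched below; unlike the glob, it deliberately skips the old sha512.sum rather than hashing its just-truncated remains.

    import os
    import subprocess

    def regenerate_sha512_sum(directory):
        # same effect as "cd dir ; sha512sum * >sha512.sum", with no shell
        names = sorted(f for f in os.listdir(directory) if f != 'sha512.sum')
        if not names:
            return  # sha512sum with no arguments would read stdin
        output = subprocess.check_output(['sha512sum'] + names, cwd=directory,
                                         universal_newlines=True)
        with open(os.path.join(directory, 'sha512.sum'), 'w') as f:
            f.write(output)
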