Received: (qmail 37457 invoked by alias); 5 Apr 2016 10:46:22 -0000
Mailing-List: contact cygwin-apps-cvs-help@sourceware.org; run by ezmlm
Precedence: bulk
Sender: cygwin-apps-cvs-owner@sourceware.org
Received: (qmail 37411 invoked by uid 9795); 5 Apr 2016 10:46:21 -0000
Date: Tue, 05 Apr 2016 10:46:00 -0000
Message-ID: <20160405104621.37369.qmail@sourceware.org>
From: jturney@sourceware.org
To: cygwin-apps-cvs@sourceware.org
Subject: [calm - Cygwin server-side packaging maintenance script] branch master, updated. 675bade3d222dd9f74a59f064f5bcaab8562030f
X-Git-Refname: refs/heads/master
X-Git-Reftype: branch
X-Git-Oldrev: 44c6acc7bfd161dda54dd03ad72d00776f3b54e4
X-Git-Newrev: 675bade3d222dd9f74a59f064f5bcaab8562030f
X-SW-Source: 2016-q2/txt/msg00000.txt.bz2

https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=675bade3d222dd9f74a59f064f5bcaab8562030f

commit 675bade3d222dd9f74a59f064f5bcaab8562030f
Author: Jon Turney
Date:   Mon Apr 4 10:07:30 2016 +0100

    leads_email should be empty unless something went wrong

    Add AbeyanceHandler(), which stores log messages and discards them if a
    severity threshold is not crossed.

    Use that with a threshold of ERROR for leads_mail (so it is silent unless
    errors occurred) and INFO for maintainer_mail (so it reports file moves).

    Also, move mail_logs to a more logical place

https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=ff5cc69c6b9fa58e886ebc7862fc10bd9aaa6b13

commit ff5cc69c6b9fa58e886ebc7862fc10bd9aaa6b13
Author: Jon Turney
Date:   Mon Apr 4 10:07:41 2016 +0100

    Include severity in the format used to make log mails

    This makes errors stand out more

https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=f9cd4c8dbe8a9e730be3dc5d7d69c4f82f06e053

commit f9cd4c8dbe8a9e730be3dc5d7d69c4f82f06e053
Author: Jon Turney
Date:   Thu Mar 31 23:03:31 2016 +0100

    Shuffle around logging levels

    Shuffle around logging levels a bit, so that moving files can be at INFO
    level.

    Also, add missing verbs to some log messages to make sentences

https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=30040bfdcb705082c422b6ab44b7b07fdadd029c

commit 30040bfdcb705082c422b6ab44b7b07fdadd029c
Author: Jon Turney
Date:   Wed Mar 23 21:23:11 2016 +0000

    Improve email subject

    Include the hostname which sent the mail, to make distinguishing mails
    coming from test instances easier.

https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=2876d1e033ba8cf1b0b1e065189e34588d790f9e

commit 2876d1e033ba8cf1b0b1e065189e34588d790f9e
Author: Jon Turney
Date:   Thu Mar 31 22:31:47 2016 +0100

    Make sure we report if an error occurred reading uploaded packages

https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=268af24f2814ad30b23fc5e40e5c43bf994df444

commit 268af24f2814ad30b23fc5e40e5c43bf994df444
Author: Jon Turney
Date:   Fri Apr 1 01:13:16 2016 +0100

    Remove the !reminder-timestamp file if it's no longer needed

    If we didn't have to ignore any files because there is no !ready, we no
    longer need the timestamp of the last reminder about that, so we can
    remove the !reminder-timestamp file.

    This means that for a given maintainer, if another instance of missing
    !ready occurs, the reminder is issued immediately, rather than being
    delayed until the time the reminder for the first instance was scheduled.
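To illustrate the first commit above, here is a minimal usage sketch (not part
of the diff below) of the AbeyanceHandler / BufferingSMTPHandler combination it
describes; the addresses, subject and log messages are placeholders, and the
two modules are assumed to be importable from this repository:

    #!/usr/bin/env python3
    # sketch only: the real calm.py builds this via its mail_logs() helper
    import logging

    from abeyance_handler import AbeyanceHandler
    from buffering_smtp_handler import BufferingSMTPHandler

    # let the handlers decide what to keep, as calm.py now sets the root
    # logger level to NOTSET
    logging.getLogger().setLevel(logging.NOTSET)

    # buffer INFO and above, but only mail the buffer if something reached
    # ERROR (the leads_email case); use thresholdLevel=logging.INFO instead
    # to get the maintainer_email behaviour
    with AbeyanceHandler(BufferingSMTPHandler(['leads@example.com'], 'calm: example report'),
                         thresholdLevel=logging.ERROR,
                         retainLevel=logging.INFO):
        logging.info("moving example-1.0-1.tar.xz to the release area")
        logging.error("error while reading uploaded packages")  # without this, nothing is mailed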
Diff:
---
 abeyance_handler.py       | 73 ++++++++++++++++++++++++++++++++++
 buffering_smtp_handler.py | 28 +------------
 calm.py                   | 95 +++++++++++++++++++++++++++------------------
 common_constants.py       |  2 +-
 package.py                | 26 ++++++------
 pkg2html.py               | 12 +++---
 uploads.py                | 37 +++++++++++-------
 7 files changed, 175 insertions(+), 98 deletions(-)

diff --git a/abeyance_handler.py b/abeyance_handler.py
new file mode 100644
index 0000000..227de3c
--- /dev/null
+++ b/abeyance_handler.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2016 Jon Turney
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+
+import logging
+from logging.handlers import BufferingHandler
+
+
+# Loosely based on the "Buffering logging messages and outputting them
+# conditionally" example from the python logging cookbook.
+#
+# AbeyanceHandler holds log output in a BufferingHandler. When closed, it will
+# pass all log output of retainLevel or higher to the target logger if any of
+# the log output reaches thresholdLevel level, otherwise it discards all log
+# output.
+
+class AbeyanceHandler(BufferingHandler):
+    def __init__(self, target, thresholdLevel, retainLevel):
+        BufferingHandler.__init__(self, capacity=0)
+        self.target = target
+        self.thresholdLevel = thresholdLevel
+
+        if retainLevel is None:
+            retainLevel = thresholdLevel
+        self.setLevel(retainLevel)
+
+    def shouldFlush(self, record):
+        # the capacity we pass to BufferingHandler is irrelevant since we
+        # override shouldFlush so it never indicates we have reached capacity
+        return False
+
+    def close(self):
+        # if there are any log records of thresholdLevel or higher ...
+        if len(self.buffer) > 0:
+            if any([record.levelno >= self.thresholdLevel for record in self.buffer]):
+                # ... send all records to the target
+                for record in self.buffer:
+                    self.target.handle(record)
+
+                self.target.close()
+
+        # otherwise, just discard the buffers contents
+        super().close()
+
+    def __enter__(self):
+        logging.getLogger().addHandler(self)
+        return self
+
+    def __exit__(self, exception_type, exception_value, traceback):
+        self.close()
+        logging.getLogger().removeHandler(self)
+
+        # process any exception in the with-block normally
+        return False
diff --git a/buffering_smtp_handler.py b/buffering_smtp_handler.py
index 2bcffcc..0bf51a4 100644
--- a/buffering_smtp_handler.py
+++ b/buffering_smtp_handler.py
@@ -20,7 +20,7 @@
 # http://www.red-dove.com/python_logging.html
 #

-from contextlib import ExitStack
+
 import logging
 import logging.handlers
 import email.message
@@ -34,7 +34,7 @@ class BufferingSMTPHandler(logging.handlers.BufferingHandler):
                  subject,
                  mailhost=common_constants.MAILHOST,
                  fromaddr='cygwin-no-reply@cygwin.com',
-                 logging_format='%(message)s'):
+                 logging_format='%(levelname)s: %(message)s'):
         logging.handlers.BufferingHandler.__init__(self, capacity=0)
         self.mailhost = mailhost
         self.mailport = None
@@ -42,7 +42,6 @@ class BufferingSMTPHandler(logging.handlers.BufferingHandler):
         self.toaddrs = toaddrs
         self.subject = subject
         self.formatter = logging_format
-        self.setLevel(logging.WARNING)
         self.setFormatter(logging.Formatter(logging_format))

     def flush(self):
@@ -90,26 +89,3 @@ class BufferingSMTPHandler(logging.handlers.BufferingHandler):
         # the capacity we pass to BufferingHandler is irrelevant since we
         # override shouldFlush so it never indicates we have reached capacity
         return False
-
-    def __enter__(self):
-        logging.getLogger().addHandler(self)
-        return self
-
-    def __exit__(self, exception_type, exception_value, traceback):
-        self.close()
-        logging.getLogger().removeHandler(self)
-
-        # process any exception in the with-block normally
-        return False
-
-
-#
-# we only want to mail the logs if the email option was used
-# (otherwise use ExitStack() as a 'do nothing' context)
-#
-
-def mail_logs(enabled, toaddrs, subject):
-    if enabled:
-        return BufferingSMTPHandler(toaddrs, subject)
-
-    return ExitStack()
diff --git a/calm.py b/calm.py
index 0dd001a..a383d46 100755
--- a/calm.py
+++ b/calm.py
@@ -45,6 +45,7 @@
 # write setup.ini file
 #

+from contextlib import ExitStack
 import argparse
 import logging
 import os
@@ -52,7 +53,8 @@ import shutil
 import sys
 import tempfile

-from buffering_smtp_handler import mail_logs
+from abeyance_handler import AbeyanceHandler
+from buffering_smtp_handler import BufferingSMTPHandler
 import common_constants
 import maintainers
 import package
@@ -66,12 +68,13 @@ import uploads
 #

 def process_arch(args):
+    subject = 'calm: cygwin package upload report from %s' % (os.uname()[1])
     details = '%s%s' % (args.arch, ',dry-run' if args.dryrun else '')

-    # send one email per run to leads
-    with mail_logs(args.email, toaddrs=args.email, subject='calm messages [%s]' % (details)) as leads_email:
+    # send one email per run to leads, if any errors occurred
+    with mail_logs(args.email, toaddrs=args.email, subject='%s [%s]' % (subject, details), thresholdLevel=logging.ERROR) as leads_email:
         if args.dryrun:
-            logging.warning("--dry-run in effect, nothing will really be done")
+            logging.warning("--dry-run is in effect, nothing will really be done")

         # build package list
         packages = package.read_packages(args.rel_area, args.arch)
@@ -92,42 +95,45 @@ def process_arch(args):
             m = mlist[name]

             # also send a mail to each maintainer about their packages
-            with mail_logs(args.email, toaddrs=m.email, subject='calm messages for %s [%s]' % (name, details)) as maint_email:
+            with mail_logs(args.email, toaddrs=m.email, subject='%s for %s [%s]' % (subject, name, details), thresholdLevel=logging.INFO) as maint_email:
                 (error, mpackages, to_relarea, to_vault, remove_always, remove_success) = uploads.scan(m, all_packages, args)

                 uploads.remove(args, remove_always)

+                if error:
+                    logging.error("error while reading uploaded packages for %s" % (name))
+                    continue
+
                 # if there are no uploaded packages for this maintainer, we
                 # don't have anything to do
                 if not mpackages:
-                    logging.info("nothing to do for maintainer %s" % (name))
+                    logging.debug("nothing to do for maintainer %s" % (name))
                     continue

-                if not error:
-                    # merge package set
-                    merged_packages = package.merge(packages, mpackages)
-
-                    # remove file which are to be removed
-                    #
-                    # XXX: this doesn't properly account for removing setup.hint
-                    # files
-                    for p in to_vault:
-                        for f in to_vault[p]:
-                            package.delete(merged_packages, p, f)
-
-                    # validate the package set
-                    if package.validate_packages(args, merged_packages):
-                        # process the move list
-                        uploads.move_to_vault(args, to_vault)
-                        uploads.remove(args, remove_success)
-                        uploads.move_to_relarea(m, args, to_relarea)
-                        # use merged package list
-                        packages = merged_packages
-                        logging.info("added %d packages from maintainer %s" % (len(mpackages), name))
-                    else:
-                        # otherwise we discard move list and merged_packages
-                        logging.error("error while merging uploaded packages for %s" % (name))
+                # merge package set
+                merged_packages = package.merge(packages, mpackages)
+
+                # remove file which are to be removed
+                #
+                # XXX: this doesn't properly account for removing setup.hint
+                # files
+                for p in to_vault:
+                    for f in to_vault[p]:
+                        package.delete(merged_packages, p, f)
+
+                # validate the package set
+                if package.validate_packages(args, merged_packages):
+                    # process the move list
+                    uploads.move_to_vault(args, to_vault)
+                    uploads.remove(args, remove_success)
+                    uploads.move_to_relarea(m, args, to_relarea)
+                    # use merged package list
+                    packages = merged_packages
+                    logging.debug("added %d packages from maintainer %s" % (len(mpackages), name))
+                else:
+                    # otherwise we discard move list and merged_packages
+                    logging.error("error while merging uploaded packages for %s" % (name))

         # write setup.ini
         package.write_setup_ini(args, packages)
@@ -146,11 +152,11 @@ def process_arch(args):
 def main(args):
     # for each arch
     for arch in common_constants.ARCHES:
-        logging.info("processing arch %s" % (arch))
+        logging.debug("processing arch %s" % (arch))
         args.arch = arch

         args.setup_version = setup_exe.extract_version(os.path.join(args.setupdir, 'setup-' + args.arch + '.exe'))
-        logging.info("setup version is '%s'" % (args.setup_version))
+        logging.debug("setup version is '%s'" % (args.setup_version))

         basedir = os.path.join(args.rel_area, args.arch)
         inifile = os.path.join(basedir, 'setup.ini')
@@ -170,7 +176,7 @@ def main(args):
         else:
             # or, if it's changed in more than timestamp
             status = os.system('/usr/bin/diff -I^setup-timestamp -w -B -q %s %s >/dev/null' % (inifile, tmpfile.name))
-            logging.info('diff exit status %d' % (status))
+            logging.debug('diff exit status %d' % (status))
             if (status >> 8) == 1:
                 changed = True
@@ -184,7 +190,7 @@ def main(args):
                 shutil.copy2(inifile, inifile + '.bak')

             # replace setup.ini
-            logging.warning("moving %s to %s" % (tmpfile.name, inifile))
+            logging.info("moving %s to %s" % (tmpfile.name, inifile))
             shutil.move(tmpfile.name, inifile)

             # compress and re-sign
@@ -208,11 +214,23 @@ def main(args):
             except FileNotFoundError:
                 pass
         else:
-            logging.info("removing %s, unchanged %s" % (tmpfile.name, inifile))
+            logging.debug("removing %s, unchanged %s" % (tmpfile.name, inifile))
             os.remove(tmpfile.name)


 #
+# we only want to mail the logs if the email option was used
+# (otherwise use ExitStack() as a 'do nothing' context)
+#
+
+def mail_logs(enabled, toaddrs, subject, thresholdLevel, retainLevel=None):
+    if enabled:
+        return AbeyanceHandler(BufferingSMTPHandler(toaddrs, subject), thresholdLevel, retainLevel)
+
+    return ExitStack()
+
+
+#
 #
 #
@@ -250,7 +268,7 @@ if __name__ == "__main__":
     rfh = logging.handlers.RotatingFileHandler(os.path.join(args.logdir, 'calm.log'), backupCount=48)
     rfh.doRollover()  # force a rotate on every run
     rfh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)-8s - %(message)s'))
-    rfh.setLevel(logging.INFO)
+    rfh.setLevel(logging.DEBUG)
     logging.getLogger().addHandler(rfh)

     # setup logging to stdout, of WARNING messages or higher (INFO if verbose)
@@ -262,8 +280,9 @@ if __name__ == "__main__":
         ch.setLevel(logging.WARNING)
     logging.getLogger().addHandler(ch)

-    # change root logger level from the default of WARNING
-    logging.getLogger().setLevel(logging.INFO)
+    # change root logger level from the default of WARNING to NOTSET so it
+    # doesn't filter out any log messages due to level
+    logging.getLogger().setLevel(logging.NOTSET)

     if args.email:
         args.email = args.email.split(',')
diff --git a/common_constants.py b/common_constants.py
index e40c346..de4bf46 100644
--- a/common_constants.py
+++ b/common_constants.py
@@ -35,7 +35,7 @@ HOMEDIR = '/sourceware/cygwin-staging/home'
 # the 'release area', contains all released files, which are rsync'ed to mirrors
 FTP = '/var/ftp/pub/cygwin'

-# logs are always emailed to these addresses
+# logs are emailed to these addresses if any errors occurred
 EMAILS = ','.join(map(lambda m: m + '@sourceware.org', ['corinna', 'yselkowitz', 'jturney']))

 # for testing purposes, every email we send is bcc'd to these addresses
diff --git a/package.py b/package.py
index b1ca42e..8e6a405 100755
--- a/package.py
+++ b/package.py
@@ -75,12 +75,12 @@ def read_packages(rel_area, arch):
     packages = defaultdict(Package)

     releasedir = os.path.join(rel_area, arch)
-    logging.info('reading packages from %s' % releasedir)
+    logging.debug('reading packages from %s' % releasedir)

     for (dirpath, subdirs, files) in os.walk(releasedir):
         read_package(packages, releasedir, dirpath, files)

-    logging.info("%d packages read" % len(packages))
+    logging.debug("%d packages read" % len(packages))

     return packages
@@ -110,7 +110,7 @@ def read_package(packages, basedir, dirpath, files, strict=False):
         p = os.path.basename(dirpath)

         if not re.match(r'^[\w\-._+]*$', p):
-            logging.error("package name contains illegal characters" % p)
+            logging.error("package '%s' name contains illegal characters" % p)
             return True

         # check for duplicate package names at different paths
@@ -133,7 +133,7 @@ def read_package(packages, basedir, dirpath, files, strict=False):
         # read sha512.sum
         sha512 = {}
         if 'sha512.sum' not in files:
-            logging.info("no sha512.sum for package '%s'" % p)
+            logging.debug("no sha512.sum for package '%s'" % p)
         else:
             files.remove('sha512.sum')
@@ -182,7 +182,7 @@ def read_package(packages, basedir, dirpath, files, strict=False):
                 tars[f].sha512 = sha512[f]
             else:
                 tars[f].sha512 = sha512_file(os.path.join(dirpath, f))
-                logging.info("no sha512.sum line for file %s in package '%s', computed sha512 hash is %s" % (f, p, tars[f].sha512))
+                logging.debug("no sha512.sum line for file %s in package '%s', computed sha512 hash is %s" % (f, p, tars[f].sha512))

         # warn about unexpected files, including tarfiles which don't match the
         # package name
@@ -218,7 +218,7 @@ def read_package(packages, basedir, dirpath, files, strict=False):
             warnings = True

     elif (len(files) > 0) and (relpath.count(os.path.sep) > 0):
-        logging.warning("no setup.hint in %s but files: %s" % (dirpath, ', '.join(files)))
+        logging.warning("no setup.hint in %s but has files: %s" % (dirpath, ', '.join(files)))

     if strict:
         return warnings
@@ -349,7 +349,7 @@ def validate_packages(args, packages):
             if len(levels) == 0:
                 # XXX: versions which don't correspond to any stability level
                 # should be reported, we might want to remove them at some point
-                logging.debug("package '%s' has no stability levels left for version '%s'" % (p, v))
+                logging.log(5, "package '%s' has no stability levels left for version '%s'" % (p, v))
                 break

             l = levels[0]
@@ -360,7 +360,7 @@ def validate_packages(args, packages):
                 if v != packages[p].hints[l]:
                     break
                 else:
-                    logging.debug("package '%s' stability '%s' override to version '%s'" % (p, l, v))
+                    logging.debug("package '%s' stability '%s' overridden to version '%s'" % (p, l, v))
             else:
                 # level 'test' must be assigned by override
                 if l == 'test':
@@ -369,7 +369,7 @@ def validate_packages(args, packages):
                     continue

             level_found = True
-            logging.debug("package '%s' stability '%s' assigned version '%s'" % (p, l, v))
+            logging.log(5, "package '%s' stability '%s' assigned version '%s'" % (p, l, v))
             break

         if not level_found:
@@ -472,7 +472,7 @@ def validate_package_maintainers(args, packages):
         if '_obsolete' in packages[p].hints['category']:
             continue
         if not is_in_package_list(packages[p].path, all_packages):
-            logging.warning("package '%s' is not in the package list" % (p))
+            logging.error("package '%s' is not in the package list" % (p))


 #
@@ -480,7 +480,7 @@ def validate_package_maintainers(args, packages):
 #

 def write_setup_ini(args, packages):
-    logging.info('writing %s' % (args.inifile))
+    logging.debug('writing %s' % (args.inifile))

     with open(args.inifile, 'w') as f:
         os.fchmod(f.fileno(), 0o644)
@@ -584,11 +584,11 @@ def merge(a, b):
         else:
             # package must exist at same relative path
             if a[p].path != b[p].path:
-                logging.error("package '%s' at paths %s and %s" % (p, a[p].path, b[p].path))
+                logging.error("package '%s' is at paths %s and %s" % (p, a[p].path, b[p].path))
             else:
                 for t in b[p].tars:
                     if t in c[p].tars:
-                        logging.error("package '%s' duplicate tarfile %s" % (p, t))
+                        logging.error("package '%s' has duplicate tarfile %s" % (p, t))
                     else:
                         c[p].tars[t] = b[p].tars[t]
diff --git a/pkg2html.py b/pkg2html.py
index e76fa7a..171d618 100755
--- a/pkg2html.py
+++ b/pkg2html.py
@@ -73,7 +73,7 @@ def update_package_listings(args, packages):
     htaccess = os.path.join(base, '.htaccess')
     if not os.path.exists(htaccess) or args.force:
-        logging.info('writing %s' % htaccess)
+        logging.debug('writing %s' % htaccess)

         if not args.dryrun:
             with open(htaccess, 'w') as f:
@@ -102,7 +102,7 @@ def update_package_listings(args, packages):
         htaccess = os.path.join(dir, '.htaccess')
         if not os.path.exists(htaccess):
-            logging.info('writing %s' % htaccess)
+            logging.debug('writing %s' % htaccess)

             if not args.dryrun or args.force:
                 with open(htaccess, 'w') as f:
@@ -127,7 +127,7 @@ def update_package_listings(args, packages):
         # ... if it doesn't already exist, or force
         if not os.path.exists(html) or args.force:
-            logging.info('writing %s' % html)
+            logging.debug('writing %s' % html)

             if not args.dryrun:
                 with open(html, 'w') as f:
@@ -167,7 +167,7 @@ def update_package_listings(args, packages):
                     '''), file=f)

         else:
-            logging.debug('not writing %s, already exists' % html)
+            logging.log(5, 'not writing %s, already exists' % html)

         # this file should exist, so remove from the toremove list
         if html in toremove:
@@ -178,7 +178,7 @@ def update_package_listings(args, packages):
     #
     packages_inc = os.path.join(base, 'packages.inc')
-    logging.info('writing %s' % packages_inc)
+    logging.debug('writing %s' % packages_inc)
     if not args.dryrun:
         with open(packages_inc, 'w') as index:
             os.fchmod(index.fileno(), 0o755)
@@ -210,7 +210,7 @@ def update_package_listings(args, packages):
     #
     for r in toremove:
-        logging.info('rm %s' % r)
+        logging.debug('rm %s' % r)
         if not args.dryrun:
             os.unlink(r)
diff --git a/uploads.py b/uploads.py
index 031e5c2..7aac67e 100644
--- a/uploads.py
+++ b/uploads.py
@@ -54,14 +54,14 @@ def scan(m, all_packages, args):
     error = False
     mtimes = [('', 0)]

-    logging.info('reading packages from %s' % (basedir))
+    logging.debug('reading packages from %s' % (basedir))

     # note mtime of any !ready file at top-level
     for ready in [os.path.join(basedir, '!ready'), os.path.join(basedir, 'release', '!ready')]:
         if os.path.exists(ready):
             mtime = os.path.getmtime(ready)
             mtimes.append(('', mtime))
-            logging.info('processing files with mtime older than %d' % (mtime))
+            logging.debug('processing files with mtime older than %d' % (mtime))
             remove.append(ready)

     # the mtime of this file indicates when 'ignoring as there is no !ready'
@@ -71,6 +71,7 @@ def scan(m, all_packages, args):
         reminder_time = os.path.getmtime(reminder_file)
     else:
         reminder_time = 0
+    reminders = False

     logging.debug("reminder-timestamp %d, interval %d, next reminder %d, current time %d" % (reminder_time, REMINDER_INTERVAL, reminder_time + REMINDER_INTERVAL, time.time()))

     # scan package directories
@@ -81,7 +82,7 @@ def scan(m, all_packages, args):
         if (not files) or (relpath == 'release'):
             continue

-        logging.info('reading uploads from %s' % dirpath)
+        logging.debug('reading uploads from %s' % dirpath)

         # note the mtime of the !ready file
         if '!ready' in files:
@@ -90,7 +91,7 @@ def scan(m, all_packages, args):
             mtimes.append((relpath + '/', mtime))
             remove.append(ready)
             files.remove('!ready')
-            logging.info("processing files below '%s' with mtime older than %d" % (relpath, mtime))
+            logging.debug("processing files below '%s' with mtime older than %d" % (relpath, mtime))
         else:
             # otherwise work back up a list of (path,mtimes) (which should be in
             # shortest-to-longest order, since os.walk() walks the tree
@@ -98,7 +99,7 @@ def scan(m, all_packages, args):
             while True:
                 (path, mtime) = mtimes[-1]
                 if relpath.startswith(path):
-                    logging.info("using mtime %d from subpath '%s' of '%s'" % (mtime, path, relpath))
+                    logging.debug("using mtime %d from subpath '%s' of '%s'" % (mtime, path, relpath))
                     break
                 else:
                     mtimes.pop()
@@ -117,7 +118,7 @@ def scan(m, all_packages, args):
         for f in sorted(files):
             fn = os.path.join(dirpath, f)
             rel_fn = os.path.join(relpath, f)
-            logging.info("processing %s" % rel_fn)
+            logging.debug("processing %s" % rel_fn)

             # ignore !packages (which we no longer use)
             # ignore !mail and !email (which we have already read)
@@ -128,6 +129,7 @@ def scan(m, all_packages, args):
             # only process files newer than !ready
             if os.path.getmtime(fn) > mtime:
                 if mtime == 0:
+                    reminders = True
                     lvl = logging.INFO

                     # if more than REMINDER_INTERVAL has elapsed since we warned
@@ -152,16 +154,16 @@ def scan(m, all_packages, args):
             if os.path.isfile(dest):
                 if f != 'setup.hint':
                     if filecmp.cmp(dest, fn, shallow=False):
-                        logging.warning("identical %s already in release area, ignoring" % fn)
+                        logging.info("ignoring, identical %s is already in release area" % fn)
                     else:
-                        logging.error("different %s already in release area, ignoring (perhaps you should rebuild with a different version-release identifier?)" % fn)
+                        logging.error("ignoring, different %s is already in release area (perhaps you should rebuild with a different version-release identifier?)" % fn)
                         error = True
                     files.remove(f)
                 else:
                     if filecmp.cmp(dest, fn, shallow=False):
-                        logging.info("identical %s already in release area" % fn)
+                        logging.debug("identical %s is already in release area" % fn)
                     else:
-                        logging.warning("replacing different %s already in release area" % fn)
+                        logging.warning("replacing, different %s is already in release area" % fn)
                     # we always consider setup.hint, as we can't have a valid package without it
                     move[relpath].append(f)
             else:
@@ -173,6 +175,13 @@ def scan(m, all_packages, args):
         if package.read_package(packages, basedir, dirpath, files, strict=True):
             error = True

+    # if we didn't need to check the reminder timestamp, it can be reset
+    if not reminders and not args.dryrun:
+        try:
+            os.remove(reminder_file)
+        except FileNotFoundError:
+            pass
+
     return (error, packages, move, vault, remove, remove_success)
@@ -191,7 +200,7 @@ def touch(fn, times=None):

 def remove(args, remove):
     for f in remove:
-        logging.info("rm %s", f)
+        logging.debug("rm %s", f)
         if not args.dryrun:
             os.unlink(f)
@@ -202,16 +211,16 @@ def remove(args, remove):

 def move(args, movelist, fromdir, todir):
     for p in sorted(movelist):
-        logging.info("mkdir %s" % os.path.join(todir, p))
+        logging.debug("mkdir %s" % os.path.join(todir, p))
         if not args.dryrun:
             try:
                 os.makedirs(os.path.join(todir, p), exist_ok=True)
             except FileExistsError:
                 pass
-        logging.warning("move from '%s' to '%s':" % (os.path.join(fromdir, p), os.path.join(todir, p)))
+        logging.info("move from '%s' to '%s':" % (os.path.join(fromdir, p), os.path.join(todir, p)))
         for f in sorted(movelist[p]):
             if os.path.exists(os.path.join(fromdir, p, f)):
-                logging.warning("%s" % (f))
+                logging.info("%s" % (f))
                 if not args.dryrun:
                     os.rename(os.path.join(fromdir, p, f), os.path.join(todir, p, f))
             else:
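A closing note on the mail_logs() helper added to calm.py above: when mailing
is disabled it returns a bare contextlib.ExitStack(), which is already a valid
context manager that does nothing on entry and exit, so the caller's
with-statement needs no special-casing. A minimal sketch of that fallback,
reusing the helper's shape with placeholder addresses and subject:

    #!/usr/bin/env python3
    # sketch only: mirrors the shape of the mail_logs() helper in calm.py;
    # addresses and subject are placeholders
    import logging
    from contextlib import ExitStack

    from abeyance_handler import AbeyanceHandler
    from buffering_smtp_handler import BufferingSMTPHandler


    def mail_logs(enabled, toaddrs, subject, thresholdLevel, retainLevel=None):
        if enabled:
            return AbeyanceHandler(BufferingSMTPHandler(toaddrs, subject), thresholdLevel, retainLevel)

        # an empty ExitStack is a no-op context manager, so the caller's
        # with-statement works unchanged when mailing is disabled
        return ExitStack()


    # with email disabled, the block still runs; nothing is buffered or mailed
    with mail_logs(False, ['nobody@example.com'], 'unused subject', logging.ERROR):
        logging.warning("goes only to whatever handlers are already configured")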