Commit 2212039d authored by jvoisin's avatar jvoisin

Make the logging more pep-282 compliant

See https://www.python.org/dev/peps/pep-0282/ for details,
but basically this commit leaves the string
replacement to the logging function, instead of
doing it in place in its parameters with the '%' operator.
parent 9ea3c2cb
......@@ -87,25 +87,23 @@ class ZipStripper(GenericArchiveStripper):
ret_list = []
zipin = zipfile.ZipFile(self.filename, 'r')
if zipin.comment != '' and not list_unsupported:
logging.debug('%s has a comment' % self.filename)
logging.debug('%s has a comment', self.filename)
return False
for item in zipin.infolist():
zipin.extract(item, self.tempdir)
path = os.path.join(self.tempdir, item.filename)
if not self.__is_zipfile_clean(item) and not list_unsupported:
logging.debug('%s from %s has compromising zipinfo' %
(item.filename, self.filename))
logging.debug('%s from %s has compromising zipinfo', item.filename, self.filename)
return False
if os.path.isfile(path):
cfile = mat.create_class_file(path, False, add2archive=self.add2archive)
if cfile is not None:
if not cfile.is_clean():
logging.debug('%s from %s has metadata' % (item.filename, self.filename))
logging.debug('%s from %s has metadata', item.filename, self.filename)
if not list_unsupported:
return False
else:
logging.info('%s\'s fileformat is not supported or harmless.'
% item.filename)
logging.info('%s\'s fileformat is not supported or harmless.', item.filename)
basename, ext = os.path.splitext(path)
if os.path.basename(item.filename) not in ('mimetype', '.rels'):
if ext not in parser.NOMETA:
......@@ -136,8 +134,7 @@ class ZipStripper(GenericArchiveStripper):
if cfile_meta != {}:
metadata[item.filename] = str(cfile_meta)
else:
logging.info('%s\'s fileformat is not supported or harmless'
% item.filename)
logging.info('%s\'s fileformat is not supported or harmless', item.filename)
zipin.close()
return metadata
......@@ -188,9 +185,9 @@ class ZipStripper(GenericArchiveStripper):
os.chmod(path, old_stat | stat.S_IWUSR)
cfile.remove_all()
os.chmod(path, old_stat)
logging.debug('Processing %s from %s' % (item.filename, self.filename))
logging.debug('Processing %s from %s', item.filename, self.filename)
elif item.filename not in whitelist:
logging.info('%s\'s format is not supported or harmless' % item.filename)
logging.info("%s's format is not supported or harmless", item.filename)
basename, ext = os.path.splitext(path)
if not (self.add2archive or ext in parser.NOMETA):
continue
......@@ -205,7 +202,7 @@ class ZipStripper(GenericArchiveStripper):
zipin.close()
zipout.close()
logging.info('%s processed' % self.filename)
logging.info('%s processed', self.filename)
self.do_backup()
return True
......@@ -248,12 +245,11 @@ class TarStripper(GenericArchiveStripper):
cfile.remove_all()
os.chmod(path, old_stat)
elif self.add2archive or os.path.splitext(item.name)[1] in parser.NOMETA:
logging.debug('%s\' format is either not supported or harmless' % item.name)
logging.debug("%s' format is either not supported or harmless", item.name)
elif item.name in whitelist:
logging.debug('%s is not supported, but MAT was told to add it anyway.'
% item.name)
logging.debug('%s is not supported, but MAT was told to add it anyway.', item.name)
else: # Don't add the file to the archive
logging.debug('%s will not be added' % item.name)
logging.debug('%s will not be added', item.name)
continue
tarout.add(unicode(path.decode('utf-8')),
unicode(item.name.decode('utf-8')),
......@@ -291,8 +287,7 @@ class TarStripper(GenericArchiveStripper):
tarin = tarfile.open(self.filename, 'r' + self.compression)
for item in tarin.getmembers():
if not self.is_file_clean(item) and not list_unsupported:
logging.debug('%s from %s has compromising tarinfo' %
(item.name, self.filename))
logging.debug('%s from %s has compromising tarinfo', item.name, self.filename)
return False
tarin.extract(item, self.tempdir)
path = os.path.join(self.tempdir, item.name)
......@@ -300,15 +295,14 @@ class TarStripper(GenericArchiveStripper):
cfile = mat.create_class_file(path, False, add2archive=self.add2archive)
if cfile is not None:
if not cfile.is_clean():
logging.debug('%s from %s has metadata' %
(item.name.decode("utf8"), self.filename))
logging.debug('%s from %s has metadata', item.name.decode("utf8"), self.filename)
if not list_unsupported:
return False
# Nested archives are treated like unsupported files
elif isinstance(cfile, GenericArchiveStripper):
ret_list.append(item.name)
else:
logging.error('%s\'s format is not supported or harmless' % item.name)
logging.error("%s's format is not supported or harmless", item.name)
if os.path.splitext(path)[1] not in parser.NOMETA:
if not list_unsupported:
return False
......@@ -334,7 +328,7 @@ class TarStripper(GenericArchiveStripper):
if meta:
current_meta['file'] = str(meta)
else:
logging.error('%s\'s format is not supported or harmless' % item.name)
logging.error("%s's format is not supported or harmless", item.name)
if not self.is_file_clean(item): # if there is meta
current_meta['mtime'] = item.mtime
......
......@@ -119,7 +119,7 @@ def secure_remove(filename):
try: # I want the file removed, even if it's read-only
os.chmod(filename, 220)
except OSError:
logging.error('Unable to add write rights to %s' % filename)
logging.error('Unable to add write rights to %s', filename)
raise libmat.exceptions.UnableToWriteFile
try:
......@@ -131,12 +131,12 @@ def secure_remove(filename):
else:
raise OSError
except OSError:
logging.error('Unable to securely remove %s' % filename)
logging.error('Unable to securely remove %s', filename)
try:
os.remove(filename)
except OSError:
logging.error('Unable to remove %s' % filename)
logging.error('Unable to remove %s', filename)
raise libmat.exceptions.UnableToRemoveFile
return True
......@@ -150,13 +150,13 @@ def create_class_file(name, backup, **kwargs):
:param bool backup: shell the file be backuped?
"""
if not os.path.isfile(name): # check if the file exists
logging.error('%s is not a valid file' % name)
logging.error('%s is not a valid file', name)
return None
elif not os.access(name, os.R_OK): # check read permissions
logging.error('%s is is not readable' % name)
logging.error('%s is is not readable', name)
return None
elif not os.path.getsize(name): # check if the file is not empty (hachoir crash on empty files)
logging.error('%s is empty' % name)
logging.error('%s is empty', name)
return None
try:
......@@ -166,11 +166,11 @@ def create_class_file(name, backup, **kwargs):
parser = hachoir_parser.createParser(filename)
if not parser:
logging.info('Unable to parse %s with hachoir' % filename)
logging.info('Unable to parse %s with hachoir', filename)
mime = mimetypes.guess_type(name)[0]
if not mime:
logging.info('Unable to find mimetype of %s' % filename)
logging.info('Unable to find mimetype of %s', filename)
return None
if mime.startswith('application/vnd.oasis.opendocument'):
......@@ -183,7 +183,7 @@ def create_class_file(name, backup, **kwargs):
try:
stripper_class = strippers.STRIPPERS[mime]
except KeyError:
logging.info('Don\'t have stripper for %s format' % mime)
logging.info('Don\'t have stripper for %s format', mime)
return None
return stripper_class(filename, parser, mime, backup, is_writable, **kwargs)
......@@ -44,7 +44,7 @@ class OpenDocumentStripper(archive.TerminalZipStripper):
# method to get all attributes of a node
pass
except KeyError: # no meta.xml file found
logging.debug('%s has no opendocument metadata' % self.filename)
logging.debug('%s has no opendocument metadata', self.filename)
zipin.close()
return metadata
......@@ -153,7 +153,7 @@ class PdfStripper(parser.GenericParser):
surface = cairo.PDFSurface(output, 10, 10)
context = cairo.Context(surface) # context draws on the surface
logging.debug('PDF rendering of %s' % self.filename)
logging.debug('PDF rendering of %s', self.filename)
for pagenum in range(document.get_n_pages()):
page = document.get_page(pagenum)
page_width, page_height = page.get_size()
......@@ -168,13 +168,13 @@ class PdfStripper(parser.GenericParser):
surface.finish()
shutil.move(output, self.output)
except:
logging.error('Something went wrong when cleaning %s.' % self.filename)
logging.error('Something went wrong when cleaning %s.', self.filename)
return False
try:
import pdfrw # For now, poppler cannot write meta, so we must use pdfrw
logging.debug('Removing %s\'s superficial metadata' % self.filename)
logging.debug('Removing %s\'s superficial metadata', self.filename)
trailer = pdfrw.PdfReader(self.output)
trailer.Info.Producer = None
trailer.Info.Creator = None
......@@ -183,7 +183,7 @@ class PdfStripper(parser.GenericParser):
writer.write(self.output)
self.do_backup()
except:
logging.error('Unable to remove all metadata from %s, please install pdfrw' % self.output)
logging.error('Unable to remove all metadata from %s, please install pdfrw', self.output)
return False
return True
......
......@@ -36,17 +36,17 @@ class MatExtension(GObject.GObject, Nautilus.MenuProvider):
# We're only going to put ourselves on supported mimetypes' context menus
if not (file.get_mime_type()
in [i["mimetype"] for i in libmat.mat.list_supported_formats()]):
logging.debug("%s is not supported by MAT" % file.get_mime_type())
logging.debug("%s is not supported by MAT", file.get_mime_type())
return
# MAT can only handle local file:
if file.get_uri_scheme() != 'file':
logging.debug("%s files not supported by MAT" % file.get_uri_scheme())
logging.debug("%s files not supported by MAT", file.get_uri_scheme())
return
# MAT can not clean non-writable files
if not file.can_write():
logging.debug("%s is not writable by MAT" % file.get_uri_scheme())
logging.debug("%s is not writable by MAT", file.get_uri_scheme())
return
item = Nautilus.MenuItem(name="Nautilus::clean_metadata",
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment