Fix logger formatting issues

This commit is contained in:
Cédric Leporcq 2021-10-16 19:29:52 +02:00
parent a93e7accc0
commit 8f27a84571
9 changed files with 87 additions and 141 deletions

103
ordigi.py
View File

@ -11,8 +11,8 @@ from ordigi import constants
from ordigi import log
from ordigi.collection import Collection
from ordigi.geolocation import GeoLocation
from ordigi.media import Media, get_all_subclasses
from ordigi.summary import Summary
# from ordigi.media import Media, get_all_subclasses
# from ordigi.summary import Summary
_logger_options = [
@ -160,9 +160,8 @@ def sort(**kwargs):
according to ordigi.conf preferences.
"""
debug = kwargs['debug']
destination = kwargs['destination']
verbose = kwargs['verbose']
log_level = log.level(kwargs['verbose'], kwargs['debug'])
paths = kwargs['paths']
@ -171,7 +170,7 @@ def sort(**kwargs):
else:
mode = 'move'
logger = log.get_logger(verbose, debug)
logger = log.get_logger(level=log_level)
max_deep = kwargs['max_deep']
if max_deep is not None:
@ -219,42 +218,22 @@ def sort(**kwargs):
kwargs['use_file_dates'],
)
loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
summary, result = collection.sort_files(
paths, loc, kwargs['remove_duplicates'], kwargs['ignore_tags']
)
if kwargs['clean']:
remove_empty_folders(destination, logger)
collection.remove_empty_folders(destination)
if verbose or debug:
if log_level < 30:
summary.print()
if not result:
sys.exit(1)
def remove_empty_folders(path, logger, remove_root=True):
    """Recursively delete empty directories below *path*.

    Sub-directories are always pruned; *path* itself is only removed
    when *remove_root* is true and it ended up empty.
    """
    if not os.path.isdir(path):
        return

    # Depth-first: prune the children so that *path* can become empty.
    for entry in os.listdir(path):
        child = os.path.join(path, entry)
        if os.path.isdir(child):
            remove_empty_folders(child, logger)

    # Delete *path* itself once nothing is left inside it.
    if remove_root and not os.listdir(path):
        logger.info(f"Removing empty folder: {path}")
        os.rmdir(path)
@click.command('clean')
@add_options(_logger_options)
@add_options(_dry_run_options)
@ -297,15 +276,14 @@ def clean(**kwargs):
"""Remove empty folders
Usage: clean [--verbose|--debug] directory [removeRoot]"""
debug = kwargs['debug']
dry_run = kwargs['dry_run']
folders = kwargs['folders']
log_level = log.level(kwargs['verbose'], kwargs['debug'])
root = kwargs['root']
verbose = kwargs['verbose']
path = kwargs['path']
logger = log.get_logger(verbose, debug)
logger = log.get_logger(level=log_level)
clean_all = False
if not folders:
clean_all = True
@ -318,27 +296,28 @@ def clean(**kwargs):
exclude = _get_exclude(opt, kwargs['exclude'])
filter_by_ext = set(kwargs['filter_by_ext'])
collection = Collection(
root,
opt['path_format'],
dry_run=dry_run,
exclude=exclude,
filter_by_ext=filter_by_ext,
glob=kwargs['glob'],
logger=logger,
max_deep=kwargs['max_deep'],
mode='move',
)
if kwargs['path_string']:
collection = Collection(
root,
opt['path_format'],
dry_run=dry_run,
exclude=exclude,
filter_by_ext=filter_by_ext,
glob=kwargs['glob'],
logger=logger,
max_deep=kwargs['max_deep'],
mode='move',
)
dedup_regex = list(kwargs['dedup_regex'])
summary, result = collection.dedup_regex(
path, dedup_regex, kwargs['remove_duplicates']
)
if clean_all or folders:
remove_empty_folders(path, logger)
collection.remove_empty_folders(path)
if verbose or debug:
if log_level < 30:
summary.print()
if not result:
@ -352,13 +331,14 @@ def init(**kwargs):
"""Regenerate the hash.json database which contains all of the sha256 signatures of media files."""
config = Config(constants.CONFIG_FILE)
opt = config.get_options()
loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
debug = kwargs['debug']
verbose = kwargs['verbose']
logger = log.get_logger(debug, verbose)
log_level = log.level(kwargs['verbose'], kwargs['debug'])
logger = log.get_logger(level=log_level)
loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
collection = Collection(kwargs['path'], None, mode='move', logger=logger)
summary = collection.init(loc)
if verbose or debug:
if log_level < 30:
summary.print()
@ -369,13 +349,14 @@ def update(**kwargs):
"""Regenerate the hash.json database which contains all of the sha256 signatures of media files."""
config = Config(constants.CONFIG_FILE)
opt = config.get_options()
loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
debug = kwargs['debug']
verbose = kwargs['verbose']
logger = log.get_logger(debug, verbose)
log_level = log.level(kwargs['verbose'], kwargs['debug'])
logger = log.get_logger(level=log_level)
loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
collection = Collection(kwargs['path'], None, mode='move', logger=logger)
summary = collection.update(loc)
if verbose or debug:
if log_level < 30:
summary.print()
@ -384,14 +365,13 @@ def update(**kwargs):
@click.argument('path', required=True, nargs=1, type=click.Path())
def check(**kwargs):
"""check db and verify hashes"""
debug = kwargs['debug']
verbose = kwargs['verbose']
logger = log.get_logger(debug, verbose)
log_level = log.level(kwargs['verbose'], kwargs['debug'])
logger = log.get_logger(level=log_level)
collection = Collection(kwargs['path'], None, mode='move', logger=logger)
result = collection.check_db()
if result:
summary, result = collection.check_files()
if verbose or debug:
if log_level < 30:
summary.print()
if not result:
sys.exit(1)
@ -443,14 +423,13 @@ def check(**kwargs):
def compare(**kwargs):
'''Compare files in directories'''
debug = kwargs['debug']
dry_run = kwargs['dry_run']
log_level = log.level(kwargs['verbose'], kwargs['debug'])
root = kwargs['root']
verbose = kwargs['verbose']
path = kwargs['path']
logger = log.get_logger(verbose, debug)
logger = log.get_logger(level=log_level)
if not root:
root = kwargs['path']
@ -476,7 +455,7 @@ def compare(**kwargs):
else:
summary, result = collection.sort_similar_images(path, kwargs['similarity'])
if verbose or debug:
if log_level < 30:
summary.print()
if not result:

View File

@ -17,7 +17,7 @@ import shutil
from ordigi import media
from ordigi.database import Sqlite
from ordigi.media import Media, get_all_subclasses
from ordigi.media import Media
from ordigi.images import Image, Images
from ordigi import request
from ordigi.summary import Summary
@ -71,7 +71,7 @@ class Collection:
self.glob = glob
self.items = self.get_items()
self.interactive = interactive
self.logger = logger
self.logger = logger.getChild(self.__class__.__name__)
self.max_deep = max_deep
self.mode = mode
# List to store media metadata
@ -882,6 +882,25 @@ class Collection:
return self.summary, record
def remove_empty_folders(self, path, remove_root=True):
    """Recursively remove empty folders below *path*.

    Fix: the method body uses ``self`` (``self.remove_empty_folders``,
    ``self.logger``) but the signature was missing the ``self`` parameter.

    :param path: directory to clean; silently ignored when not a directory
    :param remove_root: also delete *path* itself once it is empty
    """
    if not os.path.isdir(path):
        return

    # Depth-first: clean the children before testing whether *path* is empty.
    files = os.listdir(path)
    if len(files):
        for f in files:
            fullpath = os.path.join(path, f)
            if os.path.isdir(fullpath):
                self.remove_empty_folders(fullpath)

    # Re-list: sub-folders may have been removed by the recursion above.
    files = os.listdir(path)
    if len(files) == 0 and remove_root:
        self.logger.info(f"Removing empty folder: {path}")
        os.rmdir(path)
def move_file(self, img_path, dest_path):
if not self.dry_run:
shutil.move(img_path, dest_path)

View File

@ -61,7 +61,7 @@ class _ExifToolProc:
"""construct _ExifToolProc singleton object or return instance of already created object
exiftool: optional path to exiftool binary (if not provided, will search path to find it)"""
self.logger = logger
self.logger = logger.getChild(self.__class__.__name__)
if hasattr(self, "_process_running") and self._process_running:
# already running
if exiftool is not None and exiftool != self._exiftool:

View File

@ -15,10 +15,12 @@ class GeoLocation:
def __init__(
    self,
    geocoder='Nominatim',
    logger=None,
    prefer_english_names=False,
    timeout=options.default_timeout,
):
    """Store geocoding preferences.

    :param geocoder: backend name; downstream code only handles 'Nominatim'
    :param logger: parent logger; defaults to the root logger
        (``logger=logging.getLogger()`` as a default would be evaluated once
        at definition time — use a ``None`` sentinel resolved per call instead)
    :param prefer_english_names: request English place names when True
    :param timeout: geocoder request timeout — presumably seconds; confirm
        against ``options.default_timeout``
    """
    self.geocoder = geocoder
    if logger is None:
        logger = logging.getLogger()
    # Child logger named after the class, e.g. '...GeoLocation'.
    self.logger = logger.getChild(self.__class__.__name__)
    self.prefer_english_names = prefer_english_names
    self.timeout = timeout
@ -46,9 +48,7 @@ class GeoLocation:
return None
def place_name(
self, lat, lon, logger=logging.getLogger(), timeout=options.default_timeout
):
def place_name(self, lat, lon, timeout=options.default_timeout):
lookup_place_name_default = {'default': None}
if lat is None or lon is None:
return lookup_place_name_default
@ -62,7 +62,7 @@ class GeoLocation:
lookup_place_name = {}
geocoder = self.geocoder
if geocoder == 'Nominatim':
geolocation_info = self.lookup_osm(lat, lon, logger, timeout)
geolocation_info = self.lookup_osm(lat, lon, timeout)
else:
raise NameError(geocoder)
@ -83,9 +83,7 @@ class GeoLocation:
return lookup_place_name
def lookup_osm(
self, lat, lon, logger=logging.getLogger(), timeout=options.default_timeout
):
def lookup_osm( self, lat, lon, timeout=options.default_timeout):
try:
locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
@ -100,9 +98,9 @@ class GeoLocation:
else:
return None
except geopy.exc.GeocoderUnavailable or geopy.exc.GeocoderServiceError as e:
logger.error(e)
self.logger.error(e)
return None
# Fix *** TypeError: `address` must not be None
except (TypeError, ValueError) as e:
logger.error(e)
self.logger.error(e)
return None

View File

@ -23,7 +23,7 @@ try:
# Allow to open HEIF/HEIC image from pillow
register_heif_opener()
except ImportError as e:
logging.info(e)
pass
class Image:
@ -94,7 +94,9 @@ class Images:
self.images = images
self.duplicates = []
self.hash_size = hash_size
self.logger = logger
self.logger = logger.getChild(self.__class__.__name__)
if PYHEIF == False:
self.logger.info("No module named 'pyheif_pillow_opener'")
def add_images(self, file_paths):
for img_path in file_paths:

View File

@ -1,16 +1,17 @@
import logging
def get_logger(verbose, debug):
def level(verbose, debug):
if debug:
level = logging.DEBUG
return logging.DEBUG
elif verbose:
level = logging.INFO
else:
level = logging.WARNING
return logging.INFO
return logging.WARNING
def get_logger(name='ordigi', level=30):
logging.basicConfig(format='%(levelname)s:%(message)s', level=level)
logging.getLogger('asyncio').setLevel(level)
logger = logging.getLogger('ordigi')
logger.level = level
logger = logging.getLogger(name)
return logger

View File

@ -2,6 +2,7 @@
Media :class:`Media` class to get file metadata
"""
from dateutil.parser import parse
import inquirer
import logging
import mimetypes
@ -9,10 +10,6 @@ import os
import re
import sys
# import pprint
# load modules
from dateutil.parser import parse
from ordigi.exiftool import ExifTool, ExifToolCaching
from ordigi import utils
from ordigi import request
@ -54,7 +51,7 @@ class Media:
self.exif_metadata = None
self.ignore_tags = ignore_tags
self.interactive = interactive
self.logger = logger
self.logger = logger.getChild(self.__class__.__name__)
self.metadata = None
self.tags_keys = self.get_tags()
self.use_date_filename = use_date_filename
@ -453,7 +450,7 @@ class Media:
self.metadata[key] = None
place_name = loc.place_name(
self.metadata['latitude'], self.metadata['longitude'], self.logger
self.metadata['latitude'], self.metadata['longitude']
)
for key in ('city', 'state', 'country', 'default'):
# mask = 'city'
@ -496,23 +493,6 @@ class Media:
return False
@classmethod
def get_class_by_file(
cls, _file, classes, ignore_tags=set(), logger=logging.getLogger()
):
"""Static method to get a media object by file."""
if not os.path.isfile(_file):
return None
extension = os.path.splitext(_file)[1][1:].lower()
if len(extension) > 0:
for i in classes:
if extension in i.extensions:
return i(_file, ignore_tags=ignore_tags, logger=logger)
return Media(_file, logger, ignore_tags=ignore_tags, logger=logger)
def set_value(self, tag, value):
"""Set value of a tag.
@ -520,7 +500,7 @@ class Media:
"""
return ExifTool(self.file_path, logger=self.logger).setvalue(tag, value)
def set_date_media(self, date_key, time):
def set_date_media(self, time):
"""Set the date/time a photo was taken.
:param datetime time: datetime object of when the photo was taken
@ -570,35 +550,3 @@ class Media:
return self.set_value('album', self.file_path.parent.name)
def get_all_subclasses(cls=None):
"""Module method to get all subclasses of Media."""
subclasses = set()
this_class = Media
if cls is not None:
this_class = cls
subclasses.add(this_class)
this_class_subclasses = this_class.__subclasses__()
for child_class in this_class_subclasses:
subclasses.update(get_all_subclasses(child_class))
return subclasses
def get_media_class(_file, ignore_tags=set(), logger=logging.getLogger()):
if not os.path.exists(_file):
logger.warning(f'Could not find {_file}')
logger.error(f'Could not find {_file}')
return False
media = Media.get_class_by_file(
_file, get_all_subclasses(), ignore_tags=set(), logger=logger
)
if not media:
logger.warning(f'File{_file} is not supported')
logger.error(f'File {_file} can\'t be imported')
return False
return media

View File

@ -26,7 +26,7 @@ class TestCollection:
def setup_class(cls, sample_files_paths):
cls.src_path, cls.file_paths = sample_files_paths
cls.path_format = constants.default_path + '/' + constants.default_name
cls.logger = log.get_logger(True, True)
cls.logger = log.get_logger(level=10)
def teardown_class(self):
terminate_exiftool()

View File

@ -8,7 +8,6 @@ import tempfile
from ordigi import constants
from ordigi.media import Media
from ordigi.images import Images
from ordigi.exiftool import ExifTool, ExifToolCaching
from ordigi.utils import get_date_from_string