Use LOG global variable for logging

parent 506869ca4f
commit a9913e61d9
ordigi/__init__.py (new file)
@@ -0,0 +1,3 @@
+from ordigi import log
+
+LOG = log.get_logger()
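
Note: the new ordigi/__init__.py defines a single package-level logger that the rest of the code base imports directly, instead of threading a `logger` argument through every constructor. Each class then derives a per-class child from it. A minimal sketch of the pattern (the class name here is hypothetical; the file paths used as section headers below follow the module paths visible in the imports):

    from ordigi import LOG

    class Example:
        def __init__(self):
            # Child logger named "ordigi.Example"; it inherits the level and
            # handlers configured on the "ordigi" logger by log.get_logger().
            self.log = LOG.getChild(self.__class__.__name__)

        def work(self):
            self.log.info('visible when the effective level is INFO (20) or lower')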

ordigi/cli.py
@@ -7,23 +7,17 @@ import sys

 import click

+from ordigi import LOG
 from ordigi.config import Config
 from ordigi import log
 from ordigi.collection import Collection
 from ordigi.geolocation import GeoLocation

 _logger_options = [
     click.option(
-        '--debug',
-        default=False,
-        is_flag=True,
-        help='Override the value in constants.py with True.',
-    ),
-    click.option(
         '--verbose',
         '-v',
-        default=False,
-        is_flag=True,
+        default='WARNING',
         help='True if you want to see details of file processing',
     ),
 ]
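
Note: the separate --debug and --verbose boolean flags are merged into one value-taking --verbose option whose default is the level name 'WARNING'. A hypothetical invocation under the new interface:

    ordigi sort --verbose DEBUG ~/collection

The help text kept from the old flag ('True if you want to see details of file processing') still describes a boolean and no longer matches the option's level-name semantics.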
@@ -166,12 +160,11 @@ def _import(**kwargs):
     """Sort files or directories by reading their EXIF and organizing them
     according to ordigi.conf preferences.
     """
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)

-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)

-    src_paths = kwargs['src']
     root = kwargs['dest']
+    src_paths = kwargs['src']
     src_paths, root = _get_paths(src_paths, root)

     if kwargs['copy']:
@@ -200,13 +193,12 @@ def _import(**kwargs):
         kwargs['glob'],
         kwargs['interactive'],
         kwargs['ignore_tags'],
-        logger,
         opt['max_deep'],
         kwargs['use_date_filename'],
         kwargs['use_file_dates'],
     )

-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])

     summary = collection.sort_files(
         src_paths, path_format, loc, import_mode, kwargs['remove_duplicates']
@@ -238,9 +230,8 @@ def _sort(**kwargs):
     """Sort files or directories by reading their EXIF and organizing them
     according to ordigi.conf preferences.
     """
-
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)

     subdirs = kwargs['subdirs']
     root = kwargs['dest']
@@ -271,13 +262,12 @@ def _sort(**kwargs):
         kwargs['glob'],
         kwargs['interactive'],
         kwargs['ignore_tags'],
-        logger,
         opt['max_deep'],
         kwargs['use_date_filename'],
         kwargs['use_file_dates'],
     )

-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])

     summary = collection.sort_files(
         paths, path_format, loc, kwargs['remove_duplicates']
@@ -327,8 +317,8 @@ def _clean(**kwargs):

     dry_run = kwargs['dry_run']
     folders = kwargs['folders']
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)

     subdirs = kwargs['subdirs']
     root = kwargs['collection']
@@ -350,13 +340,12 @@ def _clean(**kwargs):
         exclude=exclude,
         extensions=extensions,
         glob=kwargs['glob'],
-        logger=logger,
         max_deep=opt['max_deep'],
     )

     if kwargs['path_string']:
         dedup_regex = set(kwargs['dedup_regex'])
-        collection.dedup_regex(
+        collection.dedup_path(
             paths, dedup_regex, kwargs['remove_duplicates']
         )

@@ -386,11 +375,11 @@ def _init(**kwargs):
     root = Path(kwargs['path']).expanduser().absolute()
     config = get_collection_config(root)
     opt = config.get_options()
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)

-    logger = log.get_logger(level=log_level)
-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
-    collection = Collection(root, exclude=opt['exclude'], logger=logger)
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
+    collection = Collection(root, exclude=opt['exclude'])
     summary = collection.init(loc)

     if log_level < 30:
@@ -407,11 +396,11 @@ def _update(**kwargs):
     root = Path(kwargs['path']).expanduser().absolute()
     config = get_collection_config(root)
     opt = config.get_options()
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)

-    logger = log.get_logger(level=log_level)
-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
-    collection = Collection(root, exclude=opt['exclude'], logger=logger)
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
+    collection = Collection(root, exclude=opt['exclude'])
     summary = collection.update(loc)

     if log_level < 30:
@@ -425,12 +414,13 @@ def _check(**kwargs):
     """
     Check media collection.
     """
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
     root = Path(kwargs['path']).expanduser().absolute()
+
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
     config = get_collection_config(root)
     opt = config.get_options()
-    collection = Collection(root, exclude=opt['exclude'], logger=logger)
+    collection = Collection(root, exclude=opt['exclude'])
     result = collection.check_db()
     if result:
         summary = collection.check_files()
@@ -439,7 +429,7 @@ def _check(**kwargs):
         if summary.errors:
             sys.exit(1)
     else:
-        logger.error('Db data is not accurate run `ordigi update`')
+        LOG.error('Db data is not accurate run `ordigi update`')
        sys.exit(1)

@@ -469,11 +459,11 @@ def _compare(**kwargs):
     """
     dry_run = kwargs['dry_run']
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)

     subdirs = kwargs['subdirs']
     root = kwargs['collection']

+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
     paths, root = _get_paths(subdirs, root)

     config = get_collection_config(root)
@@ -488,7 +478,6 @@ def _compare(**kwargs):
         extensions=extensions,
         glob=kwargs['glob'],
         dry_run=dry_run,
-        logger=logger,
     )

     for path in paths:

ordigi/collection.py
@@ -9,11 +9,11 @@ import os
 import re
 import shutil
 import sys
-import logging
 from pathlib import Path, PurePath

 import inquirer

+from ordigi import LOG
 from ordigi.database import Sqlite
 from ordigi.media import Medias
 from ordigi.images import Image, Images
@@ -25,10 +25,10 @@ from ordigi import utils
 class FPath:
     """Featured path object"""

-    def __init__(self, path_format, day_begins=0, logger=logging.getLogger()):
+    def __init__(self, path_format, day_begins=0):
         self.day_begins = day_begins
         self.items = self.get_items()
-        self.logger = logger
+        self.log = LOG.getChild(self.__class__.__name__)
         self.path_format = path_format
         self.whitespace_regex = '[ \t\n\r\f\v]+'
         self.whitespace_sub = '_'
@@ -63,7 +63,7 @@ class FPath:
             return date.strftime(mask)

         if date.hour < self.day_begins:
-            self.logger.info(
+            self.log.info(
                 "moving this photo to the previous day for classification purposes"
             )

@@ -230,7 +230,7 @@ class FPath:
             if part != '':
                 # Check if all masks are substituted
                 if True in [c in part for c in '{}']:
-                    self.logger.error(
+                    self.log.error(
                         f"Format path part invalid: {this_part}"
                     )
                     sys.exit(1)
@@ -271,34 +271,34 @@ class CollectionDb:

 class FileIO:
     """File Input/Ouput operations for collection"""
-    def __init__(self, dry_run=False, logger=logging.getLogger()):
+    def __init__(self, dry_run=False):
         # Options
         self.dry_run = dry_run
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)

     def copy(self, src_path, dest_path):
         if not self.dry_run:
             shutil.copy2(src_path, dest_path)
-        self.logger.info(f'copy: {src_path} -> {dest_path}')
+        self.log.info(f'copy: {src_path} -> {dest_path}')

     def move(self, src_path, dest_path):
         if not self.dry_run:
             # Move the file into the destination directory
             shutil.move(src_path, dest_path)

-        self.logger.info(f'move: {src_path} -> {dest_path}')
+        self.log.info(f'move: {src_path} -> {dest_path}')

     def remove(self, path):
         if not self.dry_run:
             os.remove(path)

-        self.logger.info(f'remove: {path}')
+        self.log.info(f'remove: {path}')

     def rmdir(self, directory):
         if not self.dry_run:
             directory.rmdir()

-        self.logger.info(f'remove dir: {directory}')
+        self.log.info(f'remove dir: {directory}')


 class Paths:
@@ -310,7 +310,6 @@ class Paths:
         extensions=None,
         glob='**/*',
         interactive=False,
-        logger=logging.getLogger(),
         max_deep=None,
     ):

@@ -325,7 +324,7 @@ class Paths:

         self.glob = glob
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.max_deep = max_deep
         self.paths_list = []

@@ -339,7 +338,7 @@ class Paths:
         """
         # some error checking
         if not path.exists():
-            self.logger.error(f'Directory {path} does not exist')
+            self.log.error(f'Directory {path} does not exist')
             sys.exit(1)

         return path
@@ -451,7 +450,6 @@ class SortMedias:
         db=None,
         dry_run=False,
         interactive=False,
-        logger=logging.getLogger(),
     ):

         # Arguments
@@ -463,7 +461,7 @@ class SortMedias:
         self.db = db
         self.dry_run = dry_run
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.summary = Summary(self.root)

         # Attributes
@@ -477,7 +475,7 @@ class SortMedias:
         dest_checksum = utils.checksum(dest_path)

         if dest_checksum != src_checksum:
-            self.logger.info(
+            self.log.info(
                 "Source checksum and destination checksum are not the same"
             )
             return False
@@ -489,7 +487,7 @@ class SortMedias:
         # Check if file remain the same
         checksum = metadata['checksum']
         if not self._checkcomp(dest_path, checksum):
-            self.logger.error(f'Files {src_path} and {dest_path} are not identical')
+            self.log.error(f'Files {src_path} and {dest_path} are not identical')
             self.summary.append('check', False, src_path, dest_path)
             return False

@@ -551,7 +549,7 @@ class SortMedias:
         for i, _ in enumerate(parts):
             dir_path = self.root / Path(*parts[0 : i + 1])
             if dir_path.is_file():
-                self.logger.warning(f'Target directory {dir_path} is a file')
+                self.log.warning(f'Target directory {dir_path} is a file')
                 # Rename the src_file
                 if self.interactive:
                     prompt = [
@@ -565,7 +563,7 @@ class SortMedias:
                 else:
                     file_path = dir_path.parent / (dir_path.name + '_file')

-                self.logger.warning(f'Renaming {dir_path} to {file_path}')
+                self.log.warning(f'Renaming {dir_path} to {file_path}')
                 if not self.dry_run:
                     shutil.move(dir_path, file_path)
                 metadata = self.medias.datas[dir_path]
@@ -574,7 +572,7 @@ class SortMedias:

         if not self.dry_run:
             directory_path.mkdir(parents=True, exist_ok=True)
-        self.logger.info(f'Create {directory_path}')
+        self.log.info(f'Create {directory_path}')

     def check_conflicts(self, src_path, dest_path, remove_duplicates=False):
         """
@@ -583,24 +581,24 @@ class SortMedias:

         # check for collisions
         if src_path == dest_path:
-            self.logger.info(f"File {dest_path} already sorted")
+            self.log.info(f"File {dest_path} already sorted")
             return 2

         if dest_path.is_dir():
-            self.logger.info(f"File {dest_path} is a existing directory")
+            self.log.info(f"File {dest_path} is a existing directory")
             return 1

         if dest_path.is_file():
-            self.logger.info(f"File {dest_path} already exist")
+            self.log.info(f"File {dest_path} already exist")
             if remove_duplicates:
                 if filecmp.cmp(src_path, dest_path):
-                    self.logger.info(
+                    self.log.info(
                         "File in source and destination are identical. Duplicate will be ignored."
                     )
                     return 3

                 # name is same, but file is different
-                self.logger.info(
+                self.log.info(
                     f"File {src_path} and {dest_path} are different."
                 )
                 return 1
@@ -633,7 +631,7 @@ class SortMedias:
                 if conflict == 1:
                     # i = 100:
                     unresolved_conflicts.append((src_path, dest_path, metadata))
-                    self.logger.error(f"Too many appends for {dest_path}")
+                    self.log.error(f"Too many appends for {dest_path}")

                 metadata['file_path'] = os.path.relpath(dest_path, self.root)

@@ -705,7 +703,6 @@ class Collection(SortMedias):
         glob='**/*',
         interactive=False,
         ignore_tags=None,
-        logger=logging.getLogger(),
         max_deep=None,
         use_date_filename=False,
         use_file_dates=False,
@@ -713,13 +710,12 @@ class Collection(SortMedias):

         # Modules
         self.db = CollectionDb(root)
-        self.fileio = FileIO(dry_run, logger)
+        self.fileio = FileIO(dry_run)
         self.paths = Paths(
             exclude,
             extensions,
             glob,
             interactive,
-            logger,
             max_deep,
         )

@@ -731,7 +727,6 @@ class Collection(SortMedias):
             self.db,
             interactive,
             ignore_tags,
-            logger,
             use_date_filename,
             use_file_dates,
         )
@@ -744,18 +739,17 @@ class Collection(SortMedias):
             self.db,
             dry_run,
             interactive,
-            logger,
         )

         # Arguments
         if not self.root.exists():
-            self.logger.error(f'Directory {self.root} does not exist')
+            self.log.error(f'Directory {self.root} does not exist')
             sys.exit(1)

         # Options
         self.day_begins = day_begins
         self.glob = glob
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)

         self.summary = Summary(self.root)

@@ -769,7 +763,6 @@ class Collection(SortMedias):
         paths = Paths(
             exclude,
             interactive=self.interactive,
-            logger=self.logger,
         )
         for file_path in paths.get_files(self.root):
             yield file_path
@@ -796,14 +789,14 @@ class Collection(SortMedias):
             relpath = os.path.relpath(file_path, self.root)
             # If file not in database
             if relpath not in db_rows:
-                self.logger.error('Db data is not accurate')
-                self.logger.info(f'{file_path} not in db')
+                self.log.error('Db data is not accurate')
+                self.log.info(f'{file_path} not in db')
                 return False

         nb_files = len(file_paths)
         nb_row = len(db_rows)
         if nb_row != nb_files:
-            self.logger.error('Db data is not accurate')
+            self.log.error('Db data is not accurate')
             return False

         return True
@@ -812,7 +805,7 @@ class Collection(SortMedias):
         if self.db.sqlite.is_empty('metadata'):
             self.init(loc)
         elif not self.check_db():
-            self.logger.error('Db data is not accurate run `ordigi update`')
+            self.log.error('Db data is not accurate run `ordigi update`')
             sys.exit(1)

     def update(self, loc):
@@ -864,7 +857,7 @@ class Collection(SortMedias):
             if checksum == self.db.sqlite.get_checksum(relpath):
                 self.summary.append('check',True, file_path)
             else:
-                self.logger.error('{file_path} is corrupted')
+                self.log.error('{file_path} is corrupted')
                 self.summary.append('check', False, file_path)

         return self.summary
@@ -893,7 +886,7 @@ class Collection(SortMedias):
         """Remove empty subdir after moving files"""
         parents = set()
         for directory in directories:
-            self.logger.info("remove empty subdirs")
+            self.log.info("remove empty subdirs")
             if not directory.is_dir():
                 continue

@@ -928,7 +921,7 @@ class Collection(SortMedias):
             # if folder empty, delete it
             files = os.listdir(directory)
             if len(files) == 0 and remove_root:
-                self.logger.info(f"Removing empty folder: {directory}")
+                self.log.info(f"Removing empty folder: {directory}")
                 if not self.dry_run:
                     os.rmdir(directory)
                 self.summary.append('remove', True, directory)
@@ -949,7 +942,7 @@ class Collection(SortMedias):
         subdirs = set()
         for src_path, metadata in self.medias.get_metadatas(src_dirs, imp=imp, loc=loc):
             # Get the destination path according to metadata
-            fpath = FPath(path_format, self.day_begins, self.logger)
+            fpath = FPath(path_format, self.day_begins)
             metadata['file_path'] = fpath.get_path(metadata)
             subdirs.add(src_path.parent)

@@ -966,7 +959,7 @@ class Collection(SortMedias):

         return self.summary

-    def dedup_regex(self, paths, dedup_regex=None, remove_duplicates=False):
+    def dedup_path(self, paths, dedup_regex=None, remove_duplicates=False):
         """Deduplicate file path parts"""

         # Check db
@@ -1048,7 +1041,7 @@ class Collection(SortMedias):
         path = self.paths.check(path)

         images_paths = set(self.paths.get_images(path))
-        images = Images(images_paths, logger=self.logger)
+        images = Images(images_paths)
         nb_row_ini = self.db.sqlite.len('metadata')
         for image in images_paths:
             # Clear datas in every loops
@@ -1062,7 +1055,7 @@ class Collection(SortMedias):

         nb_row_end = self.db.sqlite.len('metadata')
         if nb_row_ini and nb_row_ini != nb_row_end:
-            self.logger.error('Nb of row have changed unexpectedly')
+            self.log.error('Nb of row have changed unexpectedly')

         if not self.check_db():
             self.summary.append('check', False)

ordigi/exiftool.py
@@ -4,7 +4,6 @@ https://github.com/RhetTbull/osxphotos/blob/master/osxphotos/exiftool.py

 import atexit
 import json
-import logging
 import os
 from pathlib import Path
 import re
@@ -13,6 +12,8 @@ import subprocess
 from abc import ABC, abstractmethod
 from functools import lru_cache  # pylint: disable=syntax-error

+from ordigi import LOG
+
 # exiftool -stay_open commands outputs this EOF marker after command is run
 EXIFTOOL_STAYOPEN_EOF = "{ready}"
 EXIFTOOL_STAYOPEN_EOF_LEN = len(EXIFTOOL_STAYOPEN_EOF)
@@ -58,16 +59,16 @@ class _ExifToolProc:

         return cls.instance

-    def __init__(self, exiftool=None, logger=logging.getLogger()):
+    def __init__(self, exiftool=None):
         """construct _ExifToolProc singleton object or return instance of already created object
         exiftool: optional path to exiftool binary (if not provided, will search path to find it)"""

-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self._exiftool = exiftool or get_exiftool_path()
         if hasattr(self, "_process_running") and self._process_running:
             # already running
             if exiftool is not None and exiftool != self._exiftool:
-                self.logger.warning(
+                self.log.warning(
                     f"exiftool subprocess already running, "
                     f"ignoring exiftool={exiftool}"
                 )
@@ -99,7 +100,7 @@ class _ExifToolProc:
         """start exiftool in batch mode"""

         if self._process_running:
-            self.logger.warning("exiftool already running: {self._process}")
+            self.log.warning("exiftool already running: {self._process}")
             return

         # open exiftool process
@@ -155,7 +156,6 @@ class ExifTool:
         exiftool=None,
         overwrite=True,
         flags=None,
-        logger=logging.getLogger(),
     ):
         """Create ExifTool object

@@ -176,7 +176,7 @@ class ExifTool:
         self.error = None
         # if running as a context manager, self._context_mgr will be True
         self._context_mgr = False
-        self._exiftoolproc = _ExifToolProc(exiftool=exiftool, logger=logger)
+        self._exiftoolproc = _ExifToolProc(exiftool=exiftool)
         self._read_exif()

     @property
@@ -402,17 +402,17 @@ class ExifToolCaching(ExifTool):

     _singletons: dict[Path, ExifTool] = {}

-    def __new__(cls, filepath, exiftool=None, logger=logging.getLogger()):
+    def __new__(cls, filepath, exiftool=None):
         """create new object or return instance of already created singleton"""
         if filepath not in cls._singletons:
             cls._singletons[filepath] = _ExifToolCaching(
-                filepath, exiftool=exiftool, logger=logger
+                filepath, exiftool=exiftool
             )
         return cls._singletons[filepath]


 class _ExifToolCaching(ExifTool):
-    def __init__(self, filepath, exiftool=None, logger=logging.getLogger()):
+    def __init__(self, filepath, exiftool=None):
         """Create read-only ExifTool object that caches values

         Args:
@@ -425,7 +425,7 @@ class _ExifToolCaching(ExifTool):
         self._json_cache = None
         self._asdict_cache = {}
         super().__init__(
-            filepath, exiftool=exiftool, overwrite=False, flags=None, logger=logger
+            filepath, exiftool=exiftool, overwrite=False, flags=None
         )

     def run_commands(self, *commands, no_file=False):

ordigi/geolocation.py
@@ -2,8 +2,8 @@ from os import path

 import geopy
 from geopy.geocoders import Nominatim, options
-import logging

+from ordigi import LOG
 from ordigi import config

 __KEY__ = None
@@ -15,17 +15,16 @@ class GeoLocation:
     def __init__(
         self,
         geocoder='Nominatim',
-        logger=logging.getLogger(),
         prefer_english_names=False,
         timeout=options.default_timeout,
     ):
         self.geocoder = geocoder
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.prefer_english_names = prefer_english_names
         self.timeout = timeout

     def coordinates_by_name(self, name, timeout=options.default_timeout):
-        # If the name is not cached then we go ahead with an API lookup
+        """Get coordinates from given location name"""
         geocoder = self.geocoder
         if geocoder == 'Nominatim':
             locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
@@ -41,6 +40,7 @@ class GeoLocation:
         return None

     def place_name(self, lat, lon, timeout=options.default_timeout):
+        """get place name from coordinates"""
         lookup_place_name_default = {'default': None}
         if lat is None or lon is None:
             return lookup_place_name_default
@@ -76,6 +76,7 @@ class GeoLocation:
         return lookup_place_name

     def lookup_osm( self, lat, lon, timeout=options.default_timeout):
+        """Get Geolocation address data from latitude and longitude"""

         try:
             locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
@@ -87,12 +88,14 @@ class GeoLocation:
             locator_reverse = locator.reverse(coords, language=lang)
             if locator_reverse is not None:
                 return locator_reverse.raw
+            else:

-            return None
+                return None

         except geopy.exc.GeocoderUnavailable or geopy.exc.GeocoderServiceError as e:
-            self.logger.error(e)
+            self.log.error(e)
             return None

         # Fix *** TypeError: `address` must not be None
         except (TypeError, ValueError) as e:
-            self.logger.error(e)
+            self.log.error(e)
             return None
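
Note: the exception clause carried through unchanged above, `except geopy.exc.GeocoderUnavailable or geopy.exc.GeocoderServiceError as e:`, does not do what it suggests: Python evaluates the `or` expression first, and since a class object is truthy it reduces to `except geopy.exc.GeocoderUnavailable`, so GeocoderServiceError is never caught here. The conventional form would be a tuple:

    # catches either geopy exception type
    except (geopy.exc.GeocoderUnavailable, geopy.exc.GeocoderServiceError) as e:
        self.log.error(e)
        return None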

ordigi/images.py
@@ -5,14 +5,15 @@ image objects (JPG, DNG, etc.).
 .. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
 """

-import imagehash
 import imghdr
-import logging
-import numpy as np
 import os

+import imagehash
+import numpy as np
 from PIL import Image as img
 from PIL import UnidentifiedImageError
 import time

+from ordigi import LOG
+
 # HEIC extension support (experimental, not tested)
 PYHEIF = False
@@ -23,10 +24,12 @@ try:
     # Allow to open HEIF/HEIC image from pillow
     register_heif_opener()
 except ImportError as e:
-    pass
+    LOG.info(e)


 class Image:
     """Image file class"""

     def __init__(self, img_path, hash_size=8):

         self.img_path = img_path
@@ -61,6 +64,7 @@ class Image:
         return True

     def get_hash(self):
+        """Get image hash"""
         try:
             with img.open(self.img_path) as image:
                 return imagehash.average_hash(image, self.hash_size).hash
@@ -89,14 +93,13 @@ class Images:
         'rw2',
     )

-    def __init__(self, images=set(), hash_size=8, logger=logging.getLogger()):
-
+    def __init__(self, images=set(), hash_size=8):
         self.images = images
         self.duplicates = []
         self.hash_size = hash_size
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         if PYHEIF == False:
-            self.logger.info("No module named 'pyheif_pillow_opener'")
+            self.log.info("No module named 'pyheif_pillow_opener'")

     def add_images(self, file_paths):
         for img_path in file_paths:
@@ -117,7 +120,7 @@ class Images:
         hashes = {}
         for temp_hash in self.get_images_hashes():
             if temp_hash in hashes:
-                self.logger.info(
+                self.log.info(
                     "Duplicate {} \nfound for image {}\n".format(
                         img_path, hashes[temp_hash]
                     )
@@ -129,25 +132,28 @@ class Images:
         return duplicates

     def remove_duplicates(self, duplicates):
+        """Remove duplicate files"""
         for duplicate in duplicates:
             try:
                 os.remove(duplicate)
             except OSError as error:
-                self.logger.error(error)
+                self.log.error(error)

     def remove_duplicates_interactive(self, duplicates):
+        """Remove duplicate files: interactive mode"""
         if len(duplicates) != 0:
             answer = input(f"Do you want to delete these {duplicates} images? Y/n: ")
             if answer.strip().lower() == 'y':
                 self.remove_duplicates(duplicates)
-                self.logger.info('Duplicates images deleted successfully!')
+                self.log.info('Duplicates images deleted successfully!')
             else:
-                self.logger.info("No duplicates found")
+                self.log.info("No duplicates found")

     def diff(self, hash1, hash2):
         return np.count_nonzero(hash1 != hash2)

     def similarity(self, img_diff):
+        """Similarity rate in %"""
         threshold_img = img_diff / (self.hash_size ** 2)
         similarity_img = round((1 - threshold_img) * 100)

@@ -163,7 +169,7 @@ class Images:
         if hash1 is None:
             return None

-        self.logger.info(f'Finding similar images to {image.img_path}')
+        self.log.info(f'Finding similar images to {image.img_path}')

         threshold = 1 - similarity / 100
         diff_limit = int(threshold * (self.hash_size ** 2))
@@ -181,7 +187,7 @@ class Images:
                 img_diff = self.diff(hash1, hash2)
                 if img_diff <= diff_limit:
                     similarity_img = self.similarity(img_diff)
-                    self.logger.info(
+                    self.log.info(
                         f'{img.img_path} image found {similarity_img}% similar to {image}'
                     )
                     yield img.img_path

ordigi/log.py
@@ -1,22 +1,26 @@
 """Logging module"""

 import logging


-def level(verbose, debug):
-    if debug:
-        return logging.DEBUG
-    elif verbose:
-        return logging.INFO
+def get_level(verbose):
+    """Return int logging level from string"""
+    if verbose.isnumeric():
+        return int(verbose)

-    return logging.WARNING
+    return int(logging.getLevelName(verbose))


 def get_logger(name='ordigi', level=30):
+    """Get configured logger"""
     if level > 10:
-        format='%(levelname)s:%(message)s'
+        log_format='%(levelname)s:%(message)s'
     else:
-        format='%(levelname)s:%(name)s:%(message)s'
+        log_format='%(levelname)s:%(name)s:%(message)s'

-    logging.basicConfig(format=format, level=level)
+    logging.basicConfig(format=log_format, level=level)
     logging.getLogger('asyncio').setLevel(level)
     logger = logging.getLogger(name)
     logger.setLevel(level)

     return logger
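
Note: the new get_level() accepts either a numeric string or a standard level name, relying on logging.getLevelName(), which returns the numeric value when given a name. Expected behavior, sketched as an illustrative interpreter session:

    >>> from ordigi import log
    >>> log.get_level('DEBUG')
    10
    >>> log.get_level('WARNING')
    30
    >>> log.get_level('20')     # numeric strings pass straight through int()
    20

For an unrecognized name, logging.getLevelName() returns the string 'Level <name>', so the int() call raises ValueError; callers are expected to pass one of the standard names, as the new --verbose default 'WARNING' does.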

ordigi/media.py
@@ -1,4 +1,3 @@
-import logging
 import mimetypes
 import os
 import re
@@ -7,6 +6,7 @@ import sys
 from dateutil import parser
 import inquirer

+from ordigi import LOG
 from ordigi.exiftool import ExifTool, ExifToolCaching
 from ordigi import utils
 from ordigi import request
@@ -76,13 +76,12 @@ class ReadExif(ExifMetadata):
         file_path,
         exif_metadata=None,
         ignore_tags=None,
-        logger=logging.getLogger(),
     ):

         super().__init__(file_path, ignore_tags)

         # Options
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)

         if exif_metadata:
             self.exif_metadata = exif_metadata
@@ -93,7 +92,7 @@ class ReadExif(ExifMetadata):
     def get_exif_metadata(self):
         """Get metadata from exiftool."""

-        return ExifToolCaching(self.file_path, logger=self.logger).asdict()
+        return ExifToolCaching(self.file_path).asdict()

     def get_key_values(self, key):
         """
@@ -150,14 +149,13 @@ class WriteExif(ExifMetadata):
         file_path,
         metadata,
         ignore_tags=None,
-        logger=logging.getLogger(),
     ):

         super().__init__(file_path, ignore_tags)

         self.metadata = metadata

-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)

     def set_value(self, tag, value):
         """Set value of a tag.
@@ -165,7 +163,7 @@ class WriteExif(ExifMetadata):
         :returns: value (str)
         """
         # TODO overwrite mode check if fail
-        return ExifTool(self.file_path, logger=self.logger).setvalue(tag, value)
+        return ExifTool(self.file_path).setvalue(tag, value)

     def set_key_values(self, key, value):
         """Set tags values for given key"""
@@ -240,21 +238,19 @@ class Media(ReadExif):
         album_from_folder=False,
         ignore_tags=None,
         interactive=False,
-        logger=logging.getLogger(),
         use_date_filename=False,
         use_file_dates=False,
     ):
         super().__init__(
             file_path,
             ignore_tags=ignore_tags,
-            logger=logger,
         )

         self.src_dir = src_dir

         self.album_from_folder = album_from_folder
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.metadata = None
         self.use_date_filename = use_date_filename
         self.use_file_dates = use_file_dates
@@ -292,7 +288,7 @@ class Media(ReadExif):
             value = re.sub(regex, r'\g<1>-\g<2>-\g<3>', value)
             return parser.parse(value)
         except BaseException or parser._parser.ParserError as e:
-            self.logger.warning(e.args, value)
+            self.log.warning(e.args, value)
             return None

     def _get_date_media_interactive(self, choices, default):
@@ -338,7 +334,7 @@ class Media(ReadExif):
         date_modified = self.metadata['date_modified']
         if self.metadata['date_original']:
             if date_filename and date_filename != date_original:
-                self.logger.warning(
+                self.log.warning(
                     f"{filename} time mark is different from {date_original}"
                 )
                 if self.interactive:
@@ -353,14 +349,14 @@ class Media(ReadExif):

             return self.metadata['date_original']

-        self.logger.warning(f"could not find original date for {self.file_path}")
+        self.log.warning(f"could not find original date for {self.file_path}")

         if self.use_date_filename and date_filename:
-            self.logger.info(
+            self.log.info(
                 f"use date from filename:{date_filename} for {self.file_path}"
             )
             if date_created and date_filename > date_created:
-                self.logger.warning(
+                self.log.warning(
                     f"{filename} time mark is more recent than {date_created}"
                 )
                 if self.interactive:
@@ -376,13 +372,13 @@ class Media(ReadExif):

         if self.use_file_dates:
             if date_created:
-                self.logger.warning(
+                self.log.warning(
                     f"use date created:{date_created} for {self.file_path}"
                 )
                 return date_created

             if date_modified:
-                self.logger.warning(
+                self.log.warning(
                     f"use date modified:{date_modified} for {self.file_path}"
                 )
                 return date_modified
@@ -485,12 +481,12 @@ class Media(ReadExif):
         file_checksum = self.metadata['checksum']
         # Check if checksum match
         if db_checksum and db_checksum != file_checksum:
-            self.logger.error(f'{self.file_path} checksum has changed')
-            self.logger.error('(modified or corrupted file).')
-            self.logger.error(
+            self.log.error(f'{self.file_path} checksum has changed')
+            self.log.error('(modified or corrupted file).')
+            self.log.error(
                 f'file_checksum={file_checksum},\ndb_checksum={db_checksum}'
             )
-            self.logger.info(
+            self.log.info(
                 'Use --reset-cache, check database integrity or try to restore the file'
             )
             # We d'ont want to silently ignore or correct this without
@@ -620,7 +616,6 @@ class Medias:
         db=None,
         interactive=False,
         ignore_tags=None,
-        logger=logging.getLogger(),
         use_date_filename=False,
         use_file_dates=False,
     ):
@@ -637,7 +632,7 @@ class Medias:
         self.album_from_folder = album_from_folder
         self.ignore_tags = ignore_tags
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.use_date_filename = use_date_filename
         self.use_file_dates = use_file_dates

@@ -653,7 +648,6 @@ class Medias:
             self.album_from_folder,
             self.ignore_tags,
             self.interactive,
-            self.logger,
             self.use_date_filename,
             self.use_file_dates,
         )
@@ -677,7 +671,7 @@ class Medias:
         for src_path in paths:
             if self.root not in src_path.parents:
                 if not imp:
-                    self.logger.error(f"""{src_path} not in {self.root}
+                    self.log.error(f"""{src_path} not in {self.root}
 collection, use `ordigi import`""")
                     sys.exit(1)

@@ -693,7 +687,6 @@ class Medias:
             file_path,
             metadata,
             ignore_tags=self.ignore_tags,
-            logger=self.logger
         )

         updated = False
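
Note: media.py carries the same pattern flagged in geolocation.py: the unchanged line `except BaseException or parser._parser.ParserError as e:` reduces to `except BaseException`, which already swallows ParserError together with every other exception; a plain `except parser._parser.ParserError as e:` (or a tuple of the intended types) is presumably what was meant.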

pytest.ini
@@ -1,5 +1,5 @@
 [pytest]
-addopts = --ignore=old_tests -s
+# addopts = --ignore=old_tests -s

 # collect_ignore = ["old_test"]

tests/test_cli.py
@@ -25,10 +25,7 @@ class TestOrdigi:
     def setup_class(cls, sample_files_paths):
         cls.runner = CliRunner()
         cls.src_path, cls.file_paths = sample_files_paths
-        cls.logger_options = (
-            '--debug',
-            '--verbose',
-        )
+        cls.logger_options = (('--verbose', 'DEBUG'),)
         cls.filter_options = (
             ('--exclude', '.DS_Store'),
             ('--ignore-tags', 'CreateDate'),
@@ -45,7 +42,7 @@ class TestOrdigi:

     def assert_cli(self, command, attributes):
         result = self.runner.invoke(command, [*attributes])
-        assert result.exit_code == 0
+        assert result.exit_code == 0, attributes

     def assert_options(self, command, bool_options, arg_options, paths):
         for bool_option in bool_options:
@@ -62,7 +59,6 @@ class TestOrdigi:

     def test_sort(self):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--album-from-folder',
@@ -73,6 +69,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             ('--path-format', '{%Y}/{folder}/{name}.{ext}'),

@@ -86,32 +83,22 @@ class TestOrdigi:
         self.assert_all_options(cli._sort, bool_options, arg_options, paths)

     def assert_init(self):
-        for bool_option in self.logger_options:
-            result = self.runner.invoke(
-                cli._init, [bool_option, str(self.src_path
-            )])
-            assert result.exit_code == 0, bool_option
+        for opt, arg in self.logger_options:
+            self.assert_cli(cli._init, [opt, arg, str(self.src_path)])

     def assert_update(self):
         file_path = Path(ORDIGI_PATH, 'samples/test_exif/photo.cr2')
         dest_path = self.src_path / 'photo_moved.cr2'
         shutil.copyfile(file_path, dest_path)
-        for bool_option in self.logger_options:
-            result = self.runner.invoke(
-                cli._update, [bool_option, str(self.src_path
-            )])
-            assert result.exit_code == 0, bool_option
+        for opt, arg in self.logger_options:
+            self.assert_cli(cli._update, [opt, arg, str(self.src_path)])

     def assert_check(self):
-        for bool_option in self.logger_options:
-            result = self.runner.invoke(
-                cli._check, [bool_option, str(self.src_path
-            )])
-            assert result.exit_code == 0, bool_option
+        for opt, arg in self.logger_options:
+            self.assert_cli(cli._check, [opt, arg, str(self.src_path)])

     def assert_clean(self):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--delete-excluded',
@@ -121,6 +108,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             ('--dedup-regex', r'\d{4}-\d{2}'),
         )
@@ -142,7 +130,6 @@ class TestOrdigi:

     def test_import(self, tmp_path):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--album-from-folder',
@@ -153,6 +140,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             ('--path-format', '{%Y}/{folder}/{stem}.{ext}'),

@@ -168,7 +156,6 @@ class TestOrdigi:

     def test_compare(self):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--find-duplicates',
@@ -176,6 +163,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             # ('--similar-to', ''),
             ('--similarity', '65'),

tests/test_collection.py
@@ -6,16 +6,17 @@ import re
 import pytest
 import inquirer

+from ordigi import LOG
 from ordigi import constants
 from ordigi.collection import Collection, FPath, Paths
 from ordigi.exiftool import ExifToolCaching, exiftool_is_running, terminate_exiftool
 from ordigi.geolocation import GeoLocation
-from ordigi import log
 from ordigi.media import Media, ReadExif
 from ordigi import utils
 from .conftest import randomize_files, randomize_db
 from ordigi.summary import Summary

+LOG.setLevel(10)

 class TestFPath:

@@ -23,13 +24,12 @@ class TestFPath:
     def setup_class(cls, sample_files_paths):
         cls.src_path, cls.file_paths = sample_files_paths
         cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME
-        cls.logger = log.get_logger(level=10)

     def test_get_part(self, tmp_path):
         """
         Test all parts
         """
-        fpath = FPath(self.path_format, 4, self.logger)
+        fpath = FPath(self.path_format, 4)
         # Item to search for:
         items = fpath.get_items()
         masks = [
@@ -107,7 +107,7 @@ class TestFPath:

     def test_get_early_morning_photos_date(self):
         date = datetime(2021, 10, 16, 2, 20, 40)
-        fpath = FPath(self.path_format, 4, self.logger)
+        fpath = FPath(self.path_format, 4)
         part = fpath.get_early_morning_photos_date(date, '%Y-%m-%d')
         assert part == '2021-10-15'

@@ -121,7 +121,6 @@ class TestCollection:
     def setup_class(cls, sample_files_paths):
         cls.src_path, cls.file_paths = sample_files_paths
         cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME
-        cls.logger = log.get_logger(level=10)

     def teardown_class(self):
         terminate_exiftool()
@@ -138,8 +137,7 @@ class TestCollection:
         assert summary.success_table.sum('sort') == nb

     def test_sort_files(self, tmp_path):
-        collection = Collection(tmp_path, album_from_folder=True,
-            logger=self.logger)
+        collection = Collection(tmp_path, album_from_folder=True)
         loc = GeoLocation()
         summary = collection.sort_files([self.src_path],
             self.path_format, loc, imp='copy')
@@ -235,7 +233,7 @@ class TestCollection:
     def test_sort_similar_images(self, tmp_path):
         path = tmp_path / 'collection'
         shutil.copytree(self.src_path, path)
-        collection = Collection(path, logger=self.logger)
+        collection = Collection(path)
         loc = GeoLocation()
         summary = collection.init(loc)
         summary = collection.sort_similar_images(path, similarity=60)
@@ -247,7 +245,7 @@ class TestCollection:
     def test_fill_data(self, tmp_path, monkeypatch):
         path = tmp_path / 'collection'
         shutil.copytree(self.src_path, path)
-        collection = Collection(path, logger=self.logger)
+        collection = Collection(path)
         # loc = GeoLocation()

         # def mockreturn(prompt, theme):