Use LOG global variable for logging

parent 506869ca4f
commit a9913e61d9
@@ -0,0 +1,3 @@
+from ordigi import log
+
+LOG = log.get_logger()
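The new file above (presumably the package `__init__`, given the `from ordigi import LOG` imports in the later hunks) is the single place the root logger is created. A minimal sketch of the pattern the rest of this commit applies — the class name `Demo` and method `work` are invented for illustration:

    from ordigi import LOG

    class Demo:
        def __init__(self):
            # Every class now takes a named child of the shared root logger,
            # so records can show up as e.g. 'ordigi.Demo' at DEBUG level.
            self.log = LOG.getChild(self.__class__.__name__)

        def work(self):
            self.log.info('routed through the shared ordigi logger')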
@@ -7,23 +7,17 @@ import sys
 
 import click
 
+from ordigi import LOG
 from ordigi.config import Config
 from ordigi import log
 from ordigi.collection import Collection
 from ordigi.geolocation import GeoLocation
 
 _logger_options = [
-    click.option(
-        '--debug',
-        default=False,
-        is_flag=True,
-        help='Override the value in constants.py with True.',
-    ),
     click.option(
         '--verbose',
         '-v',
-        default=False,
-        is_flag=True,
+        default='WARNING',
         help='True if you want to see details of file processing',
     ),
 ]
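`--verbose` stops being a boolean flag here and starts carrying a level name or number, defaulting to `'WARNING'`; the separate `--debug` flag disappears (note the commit leaves the old flag-style help text in place). A hedged sketch of the values the new option is expected to accept, based on `get_level` in the logging hunk near the end of this diff:

    from ordigi import log

    # Assumed CLI forms after this change:
    #   ordigi sort --verbose DEBUG <path>
    #   ordigi sort -v 10 <path>
    log_level = log.get_level('DEBUG')     # resolves to 10
    LOG = log.get_logger(level=log_level)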
@@ -166,12 +160,11 @@ def _import(**kwargs):
     """Sort files or directories by reading their EXIF and organizing them
     according to ordigi.conf preferences.
     """
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
-
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
 
-    src_paths = kwargs['src']
     root = kwargs['dest']
+    src_paths = kwargs['src']
     src_paths, root = _get_paths(src_paths, root)
 
     if kwargs['copy']:
@@ -200,13 +193,12 @@ def _import(**kwargs):
        kwargs['glob'],
        kwargs['interactive'],
        kwargs['ignore_tags'],
-       logger,
        opt['max_deep'],
        kwargs['use_date_filename'],
        kwargs['use_file_dates'],
    )
 
-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
 
    summary = collection.sort_files(
        src_paths, path_format, loc, import_mode, kwargs['remove_duplicates']
@@ -238,9 +230,8 @@ def _sort(**kwargs):
     """Sort files or directories by reading their EXIF and organizing them
     according to ordigi.conf preferences.
     """
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
-
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
 
     subdirs = kwargs['subdirs']
     root = kwargs['dest']
@@ -271,13 +262,12 @@ def _sort(**kwargs):
        kwargs['glob'],
        kwargs['interactive'],
        kwargs['ignore_tags'],
-       logger,
        opt['max_deep'],
        kwargs['use_date_filename'],
        kwargs['use_file_dates'],
    )
 
-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
 
    summary = collection.sort_files(
        paths, path_format, loc, kwargs['remove_duplicates']
@@ -327,8 +317,8 @@ def _clean(**kwargs):
 
     dry_run = kwargs['dry_run']
     folders = kwargs['folders']
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
 
     subdirs = kwargs['subdirs']
     root = kwargs['collection']
@@ -350,13 +340,12 @@ def _clean(**kwargs):
            exclude=exclude,
            extensions=extensions,
            glob=kwargs['glob'],
-           logger=logger,
            max_deep=opt['max_deep'],
        )
 
     if kwargs['path_string']:
         dedup_regex = set(kwargs['dedup_regex'])
-        collection.dedup_regex(
+        collection.dedup_path(
             paths, dedup_regex, kwargs['remove_duplicates']
         )
 
@@ -386,11 +375,11 @@ def _init(**kwargs):
     root = Path(kwargs['path']).expanduser().absolute()
     config = get_collection_config(root)
     opt = config.get_options()
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
 
-    logger = log.get_logger(level=log_level)
-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
-    collection = Collection(root, exclude=opt['exclude'], logger=logger)
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
+    collection = Collection(root, exclude=opt['exclude'])
     summary = collection.init(loc)
 
     if log_level < 30:
@@ -407,11 +396,11 @@ def _update(**kwargs):
     root = Path(kwargs['path']).expanduser().absolute()
     config = get_collection_config(root)
     opt = config.get_options()
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
 
-    logger = log.get_logger(level=log_level)
-    loc = GeoLocation(opt['geocoder'], logger, opt['prefer_english_names'], opt['timeout'])
-    collection = Collection(root, exclude=opt['exclude'], logger=logger)
+    loc = GeoLocation(opt['geocoder'], opt['prefer_english_names'], opt['timeout'])
+    collection = Collection(root, exclude=opt['exclude'])
     summary = collection.update(loc)
 
     if log_level < 30:
@@ -425,12 +414,13 @@ def _check(**kwargs):
     """
     Check media collection.
     """
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
     root = Path(kwargs['path']).expanduser().absolute()
+
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
     config = get_collection_config(root)
     opt = config.get_options()
-    collection = Collection(root, exclude=opt['exclude'], logger=logger)
+    collection = Collection(root, exclude=opt['exclude'])
     result = collection.check_db()
     if result:
         summary = collection.check_files()
@@ -439,7 +429,7 @@ def _check(**kwargs):
         if summary.errors:
             sys.exit(1)
     else:
-        logger.error('Db data is not accurate run `ordigi update`')
+        LOG.error('Db data is not accurate run `ordigi update`')
         sys.exit(1)
 
 
@@ -469,11 +459,11 @@ def _compare(**kwargs):
     """
 
     dry_run = kwargs['dry_run']
-    log_level = log.level(kwargs['verbose'], kwargs['debug'])
-    logger = log.get_logger(level=log_level)
-
     subdirs = kwargs['subdirs']
     root = kwargs['collection']
 
+    log_level = log.get_level(kwargs['verbose'])
+    LOG = log.get_logger(level=log_level)
     paths, root = _get_paths(subdirs, root)
 
     config = get_collection_config(root)
@@ -488,7 +478,6 @@ def _compare(**kwargs):
        extensions=extensions,
        glob=kwargs['glob'],
        dry_run=dry_run,
-       logger=logger,
    )
 
    for path in paths:
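All commands above now share the same two-line logging prologue, and `_init`/`_update` keep their `if log_level < 30:` guard: 30 is `logging.WARNING`, so the extra summary output only appears when `--verbose` requests INFO or DEBUG. A quick standalone check:

    import logging

    assert logging.WARNING == 30
    for name in ('DEBUG', 'INFO', 'WARNING'):
        # getLevelName maps registered names back to their numeric levels
        print(name, logging.getLevelName(name) < 30)   # True, True, False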
@@ -9,11 +9,11 @@ import os
 import re
 import shutil
 import sys
-import logging
 from pathlib import Path, PurePath
 
 import inquirer
 
+from ordigi import LOG
 from ordigi.database import Sqlite
 from ordigi.media import Medias
 from ordigi.images import Image, Images
@@ -25,10 +25,10 @@ from ordigi import utils
 class FPath:
     """Featured path object"""
 
-    def __init__(self, path_format, day_begins=0, logger=logging.getLogger()):
+    def __init__(self, path_format, day_begins=0):
         self.day_begins = day_begins
         self.items = self.get_items()
-        self.logger = logger
+        self.log = LOG.getChild(self.__class__.__name__)
         self.path_format = path_format
         self.whitespace_regex = '[ \t\n\r\f\v]+'
         self.whitespace_sub = '_'
@@ -63,7 +63,7 @@ class FPath:
             return date.strftime(mask)
 
         if date.hour < self.day_begins:
-            self.logger.info(
+            self.log.info(
                 "moving this photo to the previous day for classification purposes"
             )
 
@@ -230,7 +230,7 @@ class FPath:
             if part != '':
                 # Check if all masks are substituted
                 if True in [c in part for c in '{}']:
-                    self.logger.error(
+                    self.log.error(
                         f"Format path part invalid: {this_part}"
                     )
                     sys.exit(1)
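`LOG.getChild(self.__class__.__name__)` replaces the old `logger` constructor argument throughout `ordigi.collection`. What the naming buys, in a standalone sketch (standard-library behaviour, not ordigi-specific):

    import logging

    LOG = logging.getLogger('ordigi')
    child = LOG.getChild('FPath')
    print(child.name)   # 'ordigi.FPath'
    # Child loggers propagate records up to whatever handlers
    # logging.basicConfig() attached for the root configuration.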
@@ -271,34 +271,34 @@ class CollectionDb:
 
 class FileIO:
     """File Input/Ouput operations for collection"""
-    def __init__(self, dry_run=False, logger=logging.getLogger()):
+    def __init__(self, dry_run=False):
         # Options
         self.dry_run = dry_run
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
 
     def copy(self, src_path, dest_path):
         if not self.dry_run:
             shutil.copy2(src_path, dest_path)
-        self.logger.info(f'copy: {src_path} -> {dest_path}')
+        self.log.info(f'copy: {src_path} -> {dest_path}')
 
     def move(self, src_path, dest_path):
         if not self.dry_run:
             # Move the file into the destination directory
             shutil.move(src_path, dest_path)
 
-        self.logger.info(f'move: {src_path} -> {dest_path}')
+        self.log.info(f'move: {src_path} -> {dest_path}')
 
     def remove(self, path):
         if not self.dry_run:
             os.remove(path)
 
-        self.logger.info(f'remove: {path}')
+        self.log.info(f'remove: {path}')
 
     def rmdir(self, directory):
         if not self.dry_run:
             directory.rmdir()
 
-        self.logger.info(f'remove dir: {directory}')
+        self.log.info(f'remove dir: {directory}')
 
 
 class Paths:
@@ -310,7 +310,6 @@ class Paths:
         extensions=None,
         glob='**/*',
         interactive=False,
-        logger=logging.getLogger(),
         max_deep=None,
     ):
 
@@ -325,7 +324,7 @@ class Paths:
 
         self.glob = glob
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.max_deep = max_deep
         self.paths_list = []
 
@@ -339,7 +338,7 @@ class Paths:
         """
         # some error checking
         if not path.exists():
-            self.logger.error(f'Directory {path} does not exist')
+            self.log.error(f'Directory {path} does not exist')
             sys.exit(1)
 
         return path
@@ -451,7 +450,6 @@ class SortMedias:
         db=None,
         dry_run=False,
         interactive=False,
-        logger=logging.getLogger(),
     ):
 
         # Arguments
@@ -463,7 +461,7 @@ class SortMedias:
         self.db = db
         self.dry_run = dry_run
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.summary = Summary(self.root)
 
         # Attributes
@@ -477,7 +475,7 @@ class SortMedias:
         dest_checksum = utils.checksum(dest_path)
 
         if dest_checksum != src_checksum:
-            self.logger.info(
+            self.log.info(
                 "Source checksum and destination checksum are not the same"
             )
             return False
@@ -489,7 +487,7 @@ class SortMedias:
         # Check if file remain the same
         checksum = metadata['checksum']
         if not self._checkcomp(dest_path, checksum):
-            self.logger.error(f'Files {src_path} and {dest_path} are not identical')
+            self.log.error(f'Files {src_path} and {dest_path} are not identical')
             self.summary.append('check', False, src_path, dest_path)
             return False
 
@@ -551,7 +549,7 @@ class SortMedias:
         for i, _ in enumerate(parts):
             dir_path = self.root / Path(*parts[0 : i + 1])
             if dir_path.is_file():
-                self.logger.warning(f'Target directory {dir_path} is a file')
+                self.log.warning(f'Target directory {dir_path} is a file')
                 # Rename the src_file
                 if self.interactive:
                     prompt = [
@@ -565,7 +563,7 @@ class SortMedias:
                 else:
                     file_path = dir_path.parent / (dir_path.name + '_file')
 
-                self.logger.warning(f'Renaming {dir_path} to {file_path}')
+                self.log.warning(f'Renaming {dir_path} to {file_path}')
                 if not self.dry_run:
                     shutil.move(dir_path, file_path)
                 metadata = self.medias.datas[dir_path]
@@ -574,7 +572,7 @@ class SortMedias:
 
         if not self.dry_run:
             directory_path.mkdir(parents=True, exist_ok=True)
-        self.logger.info(f'Create {directory_path}')
+        self.log.info(f'Create {directory_path}')
 
     def check_conflicts(self, src_path, dest_path, remove_duplicates=False):
         """
@@ -583,24 +581,24 @@ class SortMedias:
 
         # check for collisions
         if src_path == dest_path:
-            self.logger.info(f"File {dest_path} already sorted")
+            self.log.info(f"File {dest_path} already sorted")
             return 2
 
         if dest_path.is_dir():
-            self.logger.info(f"File {dest_path} is a existing directory")
+            self.log.info(f"File {dest_path} is a existing directory")
             return 1
 
         if dest_path.is_file():
-            self.logger.info(f"File {dest_path} already exist")
+            self.log.info(f"File {dest_path} already exist")
             if remove_duplicates:
                 if filecmp.cmp(src_path, dest_path):
-                    self.logger.info(
+                    self.log.info(
                         "File in source and destination are identical. Duplicate will be ignored."
                     )
                     return 3
 
                 # name is same, but file is different
-                self.logger.info(
+                self.log.info(
                     f"File {src_path} and {dest_path} are different."
                 )
                 return 1
@@ -633,7 +631,7 @@ class SortMedias:
                 if conflict == 1:
                     # i = 100:
                     unresolved_conflicts.append((src_path, dest_path, metadata))
-                    self.logger.error(f"Too many appends for {dest_path}")
+                    self.log.error(f"Too many appends for {dest_path}")
 
                 metadata['file_path'] = os.path.relpath(dest_path, self.root)
 
@@ -705,7 +703,6 @@ class Collection(SortMedias):
         glob='**/*',
         interactive=False,
         ignore_tags=None,
-        logger=logging.getLogger(),
         max_deep=None,
         use_date_filename=False,
         use_file_dates=False,
@@ -713,13 +710,12 @@ class Collection(SortMedias):
 
         # Modules
         self.db = CollectionDb(root)
-        self.fileio = FileIO(dry_run, logger)
+        self.fileio = FileIO(dry_run)
         self.paths = Paths(
             exclude,
             extensions,
             glob,
             interactive,
-            logger,
             max_deep,
         )
 
@@ -731,7 +727,6 @@ class Collection(SortMedias):
             self.db,
             interactive,
             ignore_tags,
-            logger,
             use_date_filename,
             use_file_dates,
         )
@@ -744,18 +739,17 @@ class Collection(SortMedias):
             self.db,
             dry_run,
             interactive,
-            logger,
         )
 
         # Arguments
         if not self.root.exists():
-            self.logger.error(f'Directory {self.root} does not exist')
+            self.log.error(f'Directory {self.root} does not exist')
             sys.exit(1)
 
         # Options
         self.day_begins = day_begins
         self.glob = glob
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
 
         self.summary = Summary(self.root)
 
@@ -769,7 +763,6 @@ class Collection(SortMedias):
         paths = Paths(
             exclude,
             interactive=self.interactive,
-            logger=self.logger,
         )
         for file_path in paths.get_files(self.root):
             yield file_path
@@ -796,14 +789,14 @@ class Collection(SortMedias):
             relpath = os.path.relpath(file_path, self.root)
             # If file not in database
             if relpath not in db_rows:
-                self.logger.error('Db data is not accurate')
-                self.logger.info(f'{file_path} not in db')
+                self.log.error('Db data is not accurate')
+                self.log.info(f'{file_path} not in db')
                 return False
 
         nb_files = len(file_paths)
         nb_row = len(db_rows)
         if nb_row != nb_files:
-            self.logger.error('Db data is not accurate')
+            self.log.error('Db data is not accurate')
             return False
 
         return True
@@ -812,7 +805,7 @@ class Collection(SortMedias):
         if self.db.sqlite.is_empty('metadata'):
             self.init(loc)
         elif not self.check_db():
-            self.logger.error('Db data is not accurate run `ordigi update`')
+            self.log.error('Db data is not accurate run `ordigi update`')
             sys.exit(1)
 
     def update(self, loc):
@@ -864,7 +857,7 @@ class Collection(SortMedias):
             if checksum == self.db.sqlite.get_checksum(relpath):
                 self.summary.append('check',True, file_path)
             else:
-                self.logger.error('{file_path} is corrupted')
+                self.log.error('{file_path} is corrupted')
                 self.summary.append('check', False, file_path)
 
         return self.summary
@@ -893,7 +886,7 @@ class Collection(SortMedias):
         """Remove empty subdir after moving files"""
         parents = set()
         for directory in directories:
-            self.logger.info("remove empty subdirs")
+            self.log.info("remove empty subdirs")
             if not directory.is_dir():
                 continue
 
@@ -928,7 +921,7 @@ class Collection(SortMedias):
         # if folder empty, delete it
         files = os.listdir(directory)
         if len(files) == 0 and remove_root:
-            self.logger.info(f"Removing empty folder: {directory}")
+            self.log.info(f"Removing empty folder: {directory}")
             if not self.dry_run:
                 os.rmdir(directory)
             self.summary.append('remove', True, directory)
@@ -949,7 +942,7 @@ class Collection(SortMedias):
         subdirs = set()
         for src_path, metadata in self.medias.get_metadatas(src_dirs, imp=imp, loc=loc):
             # Get the destination path according to metadata
-            fpath = FPath(path_format, self.day_begins, self.logger)
+            fpath = FPath(path_format, self.day_begins)
             metadata['file_path'] = fpath.get_path(metadata)
             subdirs.add(src_path.parent)
 
@@ -966,7 +959,7 @@ class Collection(SortMedias):
 
         return self.summary
 
-    def dedup_regex(self, paths, dedup_regex=None, remove_duplicates=False):
+    def dedup_path(self, paths, dedup_regex=None, remove_duplicates=False):
         """Deduplicate file path parts"""
 
         # Check db
@@ -1048,7 +1041,7 @@ class Collection(SortMedias):
             path = self.paths.check(path)
 
             images_paths = set(self.paths.get_images(path))
-            images = Images(images_paths, logger=self.logger)
+            images = Images(images_paths)
             nb_row_ini = self.db.sqlite.len('metadata')
             for image in images_paths:
                 # Clear datas in every loops
@@ -1062,7 +1055,7 @@ class Collection(SortMedias):
 
             nb_row_end = self.db.sqlite.len('metadata')
             if nb_row_ini and nb_row_ini != nb_row_end:
-                self.logger.error('Nb of row have changed unexpectedly')
+                self.log.error('Nb of row have changed unexpectedly')
 
             if not self.check_db():
                 self.summary.append('check', False)
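The `dedup_regex` method is renamed `dedup_path` above, and its call site in `_clean` was updated earlier in this diff. A hedged usage sketch; the root path, `exclude` value, and regex are illustrative (the regex matches the `--dedup-regex` example used in the test hunks below):

    from pathlib import Path
    from ordigi.collection import Collection

    root = Path('~/Pictures').expanduser()     # illustrative collection root
    collection = Collection(root, exclude=set())
    collection.dedup_path([root], dedup_regex={r'\d{4}-\d{2}'}, remove_duplicates=False)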
@@ -4,7 +4,6 @@ https://github.com/RhetTbull/osxphotos/blob/master/osxphotos/exiftool.py
 
 import atexit
 import json
-import logging
 import os
 from pathlib import Path
 import re
@@ -13,6 +12,8 @@ import subprocess
 from abc import ABC, abstractmethod
 from functools import lru_cache  # pylint: disable=syntax-error
 
+from ordigi import LOG
+
 # exiftool -stay_open commands outputs this EOF marker after command is run
 EXIFTOOL_STAYOPEN_EOF = "{ready}"
 EXIFTOOL_STAYOPEN_EOF_LEN = len(EXIFTOOL_STAYOPEN_EOF)
@@ -58,16 +59,16 @@ class _ExifToolProc:
 
         return cls.instance
 
-    def __init__(self, exiftool=None, logger=logging.getLogger()):
+    def __init__(self, exiftool=None):
         """construct _ExifToolProc singleton object or return instance of already created object
         exiftool: optional path to exiftool binary (if not provided, will search path to find it)"""
 
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self._exiftool = exiftool or get_exiftool_path()
         if hasattr(self, "_process_running") and self._process_running:
             # already running
             if exiftool is not None and exiftool != self._exiftool:
-                self.logger.warning(
+                self.log.warning(
                     f"exiftool subprocess already running, "
                     f"ignoring exiftool={exiftool}"
                 )
@@ -99,7 +100,7 @@ class _ExifToolProc:
         """start exiftool in batch mode"""
 
         if self._process_running:
-            self.logger.warning("exiftool already running: {self._process}")
+            self.log.warning("exiftool already running: {self._process}")
             return
 
         # open exiftool process
@@ -155,7 +156,6 @@ class ExifTool:
         exiftool=None,
         overwrite=True,
         flags=None,
-        logger=logging.getLogger(),
     ):
         """Create ExifTool object
 
@@ -176,7 +176,7 @@ class ExifTool:
         self.error = None
         # if running as a context manager, self._context_mgr will be True
         self._context_mgr = False
-        self._exiftoolproc = _ExifToolProc(exiftool=exiftool, logger=logger)
+        self._exiftoolproc = _ExifToolProc(exiftool=exiftool)
         self._read_exif()
 
     @property
@@ -402,17 +402,17 @@ class ExifToolCaching(ExifTool):
 
     _singletons: dict[Path, ExifTool] = {}
 
-    def __new__(cls, filepath, exiftool=None, logger=logging.getLogger()):
+    def __new__(cls, filepath, exiftool=None):
         """create new object or return instance of already created singleton"""
         if filepath not in cls._singletons:
             cls._singletons[filepath] = _ExifToolCaching(
-                filepath, exiftool=exiftool, logger=logger
+                filepath, exiftool=exiftool
             )
         return cls._singletons[filepath]
 
 
 class _ExifToolCaching(ExifTool):
-    def __init__(self, filepath, exiftool=None, logger=logging.getLogger()):
+    def __init__(self, filepath, exiftool=None):
         """Create read-only ExifTool object that caches values
 
         Args:
@@ -425,7 +425,7 @@ class _ExifToolCaching(ExifTool):
         self._json_cache = None
         self._asdict_cache = {}
         super().__init__(
-            filepath, exiftool=exiftool, overwrite=False, flags=None, logger=logger
+            filepath, exiftool=exiftool, overwrite=False, flags=None
        )
 
     def run_commands(self, *commands, no_file=False):
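With the `logger` parameters gone, the exiftool wrappers bind their child loggers internally while keeping their singleton/caching behaviour. A hedged usage sketch (requires the `exiftool` binary on the PATH; the file path is illustrative):

    from ordigi.exiftool import ExifToolCaching

    exif = ExifToolCaching('photo.jpg')   # one cached instance per file path
    data = exif.asdict()                  # reads EXIF once, then serves the cache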
@@ -2,8 +2,8 @@ from os import path
 
 import geopy
 from geopy.geocoders import Nominatim, options
-import logging
 
+from ordigi import LOG
 from ordigi import config
 
 __KEY__ = None
@@ -15,17 +15,16 @@ class GeoLocation:
     def __init__(
         self,
         geocoder='Nominatim',
-        logger=logging.getLogger(),
         prefer_english_names=False,
         timeout=options.default_timeout,
     ):
         self.geocoder = geocoder
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.prefer_english_names = prefer_english_names
         self.timeout = timeout
 
     def coordinates_by_name(self, name, timeout=options.default_timeout):
-        # If the name is not cached then we go ahead with an API lookup
+        """Get coordinates from given location name"""
         geocoder = self.geocoder
         if geocoder == 'Nominatim':
             locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
@@ -41,6 +40,7 @@ class GeoLocation:
         return None
 
     def place_name(self, lat, lon, timeout=options.default_timeout):
+        """get place name from coordinates"""
         lookup_place_name_default = {'default': None}
         if lat is None or lon is None:
             return lookup_place_name_default
@@ -76,6 +76,7 @@ class GeoLocation:
         return lookup_place_name
 
     def lookup_osm( self, lat, lon, timeout=options.default_timeout):
+        """Get Geolocation address data from latitude and longitude"""
 
         try:
             locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
@@ -87,12 +88,14 @@ class GeoLocation:
             locator_reverse = locator.reverse(coords, language=lang)
             if locator_reverse is not None:
                 return locator_reverse.raw
-            else:
-                return None
+
+            return None
 
         except geopy.exc.GeocoderUnavailable or geopy.exc.GeocoderServiceError as e:
-            self.logger.error(e)
+            self.log.error(e)
             return None
 
         # Fix *** TypeError: `address` must not be None
         except (TypeError, ValueError) as e:
-            self.logger.error(e)
+            self.log.error(e)
             return None
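`GeoLocation` is now constructed from config options alone, as the `_init`/`_update` hunks earlier show. A hedged sketch of the two lookups it exposes (both hit the Nominatim web service, so results and availability vary):

    from ordigi.geolocation import GeoLocation

    loc = GeoLocation(geocoder='Nominatim', prefer_english_names=False)
    coords = loc.coordinates_by_name('Paris')   # name -> coordinates
    place = loc.place_name(48.85, 2.35)         # coordinates -> place data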
@@ -5,14 +5,15 @@ image objects (JPG, DNG, etc.).
 .. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
 """
 
-import imagehash
 import imghdr
-import logging
-import numpy as np
 import os
 
+import imagehash
+import numpy as np
 from PIL import Image as img
 from PIL import UnidentifiedImageError
-import time
+
+from ordigi import LOG
 
 # HEIC extension support (experimental, not tested)
 PYHEIF = False
@@ -23,10 +24,12 @@ try:
     # Allow to open HEIF/HEIC image from pillow
     register_heif_opener()
 except ImportError as e:
-    pass
+    LOG.info(e)
 
 
 class Image:
+    """Image file class"""
+
     def __init__(self, img_path, hash_size=8):
 
         self.img_path = img_path
@@ -61,6 +64,7 @@ class Image:
         return True
 
     def get_hash(self):
+        """Get image hash"""
         try:
             with img.open(self.img_path) as image:
                 return imagehash.average_hash(image, self.hash_size).hash
@@ -89,14 +93,13 @@ class Images:
         'rw2',
     )
 
-    def __init__(self, images=set(), hash_size=8, logger=logging.getLogger()):
+    def __init__(self, images=set(), hash_size=8):
 
         self.images = images
         self.duplicates = []
         self.hash_size = hash_size
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         if PYHEIF == False:
-            self.logger.info("No module named 'pyheif_pillow_opener'")
+            self.log.info("No module named 'pyheif_pillow_opener'")
 
     def add_images(self, file_paths):
         for img_path in file_paths:
@@ -117,7 +120,7 @@ class Images:
         hashes = {}
         for temp_hash in self.get_images_hashes():
             if temp_hash in hashes:
-                self.logger.info(
+                self.log.info(
                     "Duplicate {} \nfound for image {}\n".format(
                         img_path, hashes[temp_hash]
                     )
@@ -129,25 +132,28 @@ class Images:
         return duplicates
 
     def remove_duplicates(self, duplicates):
+        """Remove duplicate files"""
         for duplicate in duplicates:
             try:
                 os.remove(duplicate)
             except OSError as error:
-                self.logger.error(error)
+                self.log.error(error)
 
     def remove_duplicates_interactive(self, duplicates):
+        """Remove duplicate files: interactive mode"""
         if len(duplicates) != 0:
             answer = input(f"Do you want to delete these {duplicates} images? Y/n: ")
             if answer.strip().lower() == 'y':
                 self.remove_duplicates(duplicates)
-                self.logger.info('Duplicates images deleted successfully!')
+                self.log.info('Duplicates images deleted successfully!')
         else:
-            self.logger.info("No duplicates found")
+            self.log.info("No duplicates found")
 
     def diff(self, hash1, hash2):
         return np.count_nonzero(hash1 != hash2)
 
     def similarity(self, img_diff):
+        """Similarity rate in %"""
         threshold_img = img_diff / (self.hash_size ** 2)
         similarity_img = round((1 - threshold_img) * 100)
 
@@ -163,7 +169,7 @@ class Images:
         if hash1 is None:
             return None
 
-        self.logger.info(f'Finding similar images to {image.img_path}')
+        self.log.info(f'Finding similar images to {image.img_path}')
 
         threshold = 1 - similarity / 100
         diff_limit = int(threshold * (self.hash_size ** 2))
@@ -181,7 +187,7 @@ class Images:
                 img_diff = self.diff(hash1, hash2)
                 if img_diff <= diff_limit:
                     similarity_img = self.similarity(img_diff)
-                    self.logger.info(
+                    self.log.info(
                         f'{img.img_path} image found {similarity_img}% similar to {image}'
                     )
                     yield img.img_path
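`similarity()` above turns a Hamming distance between average-hashes into a percentage. A worked example with the default `hash_size=8`, i.e. a 64-bit hash:

    hash_size = 8
    img_diff = 16                                      # 16 of 64 bits differ
    threshold_img = img_diff / (hash_size ** 2)        # 0.25
    similarity_img = round((1 - threshold_img) * 100)  # 75 (%)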
@@ -1,22 +1,26 @@
+"""Logging module"""
+
 import logging
 
 
-def level(verbose, debug):
-    if debug:
-        return logging.DEBUG
-    elif verbose:
-        return logging.INFO
-
-    return logging.WARNING
+def get_level(verbose):
+    """Return int logging level from string"""
+    if verbose.isnumeric():
+        return int(verbose)
+
+    return int(logging.getLevelName(verbose))
 
 
 def get_logger(name='ordigi', level=30):
+    """Get configured logger"""
     if level > 10:
-        format='%(levelname)s:%(message)s'
+        log_format='%(levelname)s:%(message)s'
     else:
-        format='%(levelname)s:%(name)s:%(message)s'
+        log_format='%(levelname)s:%(name)s:%(message)s'
 
-    logging.basicConfig(format=format, level=level)
+    logging.basicConfig(format=log_format, level=level)
     logging.getLogger('asyncio').setLevel(level)
     logger = logging.getLogger(name)
+    logger.setLevel(level)
 
     return logger
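`get_level` accepts either a numeric string or a standard level name; `logging.getLevelName` returns the numeric value when handed a registered name. One caveat worth noting: for an unknown name `getLevelName` returns the string `'Level <name>'`, so the `int()` call would raise `ValueError` — the diff adds no guard for that. Behaviour sketch:

    import logging

    int(logging.getLevelName('DEBUG'))   # 10
    '30'.isnumeric()                     # True, so get_level('30') returns 30
    logging.getLevelName('BOGUS')        # 'Level BOGUS' -> int() would raise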
@@ -1,4 +1,3 @@
-import logging
 import mimetypes
 import os
 import re
@@ -7,6 +6,7 @@ import sys
 from dateutil import parser
 import inquirer
 
+from ordigi import LOG
 from ordigi.exiftool import ExifTool, ExifToolCaching
 from ordigi import utils
 from ordigi import request
@@ -76,13 +76,12 @@ class ReadExif(ExifMetadata):
         file_path,
         exif_metadata=None,
         ignore_tags=None,
-        logger=logging.getLogger(),
     ):
 
         super().__init__(file_path, ignore_tags)
 
         # Options
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
 
         if exif_metadata:
             self.exif_metadata = exif_metadata
@@ -93,7 +92,7 @@ class ReadExif(ExifMetadata):
     def get_exif_metadata(self):
         """Get metadata from exiftool."""
 
-        return ExifToolCaching(self.file_path, logger=self.logger).asdict()
+        return ExifToolCaching(self.file_path).asdict()
 
     def get_key_values(self, key):
         """
@@ -150,14 +149,13 @@ class WriteExif(ExifMetadata):
         file_path,
         metadata,
         ignore_tags=None,
-        logger=logging.getLogger(),
     ):
 
         super().__init__(file_path, ignore_tags)
 
         self.metadata = metadata
 
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
 
     def set_value(self, tag, value):
         """Set value of a tag.
@@ -165,7 +163,7 @@ class WriteExif(ExifMetadata):
         :returns: value (str)
         """
         # TODO overwrite mode check if fail
-        return ExifTool(self.file_path, logger=self.logger).setvalue(tag, value)
+        return ExifTool(self.file_path).setvalue(tag, value)
 
     def set_key_values(self, key, value):
         """Set tags values for given key"""
@@ -240,21 +238,19 @@ class Media(ReadExif):
         album_from_folder=False,
         ignore_tags=None,
         interactive=False,
-        logger=logging.getLogger(),
         use_date_filename=False,
         use_file_dates=False,
     ):
         super().__init__(
             file_path,
             ignore_tags=ignore_tags,
-            logger=logger,
         )
 
         self.src_dir = src_dir
 
         self.album_from_folder = album_from_folder
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.metadata = None
         self.use_date_filename = use_date_filename
         self.use_file_dates = use_file_dates
@@ -292,7 +288,7 @@ class Media(ReadExif):
                 value = re.sub(regex, r'\g<1>-\g<2>-\g<3>', value)
                 return parser.parse(value)
             except BaseException or parser._parser.ParserError as e:
-                self.logger.warning(e.args, value)
+                self.log.warning(e.args, value)
                 return None
 
     def _get_date_media_interactive(self, choices, default):
@@ -338,7 +334,7 @@ class Media(ReadExif):
         date_modified = self.metadata['date_modified']
         if self.metadata['date_original']:
             if date_filename and date_filename != date_original:
-                self.logger.warning(
+                self.log.warning(
                     f"{filename} time mark is different from {date_original}"
                 )
                 if self.interactive:
@@ -353,14 +349,14 @@ class Media(ReadExif):
 
             return self.metadata['date_original']
 
-        self.logger.warning(f"could not find original date for {self.file_path}")
+        self.log.warning(f"could not find original date for {self.file_path}")
 
         if self.use_date_filename and date_filename:
-            self.logger.info(
+            self.log.info(
                 f"use date from filename:{date_filename} for {self.file_path}"
             )
             if date_created and date_filename > date_created:
-                self.logger.warning(
+                self.log.warning(
                     f"{filename} time mark is more recent than {date_created}"
                 )
                 if self.interactive:
@@ -376,13 +372,13 @@ class Media(ReadExif):
 
         if self.use_file_dates:
             if date_created:
-                self.logger.warning(
+                self.log.warning(
                     f"use date created:{date_created} for {self.file_path}"
                 )
                 return date_created
 
             if date_modified:
-                self.logger.warning(
+                self.log.warning(
                     f"use date modified:{date_modified} for {self.file_path}"
                 )
                 return date_modified
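The two hunks above reroute the log calls without touching the date-preference order itself. A hypothetical condensation of that order as it reads in this diff (the helper name and dict access are invented for illustration, not the actual method):

    def choose_date_sketch(metadata, use_date_filename=False, use_file_dates=False):
        """Condensed reading of the fallback order; not the actual method."""
        if metadata['date_original']:
            return metadata['date_original']
        if use_date_filename and metadata['date_filename']:
            return metadata['date_filename']
        if use_file_dates:
            return metadata['date_created'] or metadata['date_modified']
        return None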
@@ -485,12 +481,12 @@ class Media(ReadExif):
         file_checksum = self.metadata['checksum']
         # Check if checksum match
         if db_checksum and db_checksum != file_checksum:
-            self.logger.error(f'{self.file_path} checksum has changed')
-            self.logger.error('(modified or corrupted file).')
-            self.logger.error(
+            self.log.error(f'{self.file_path} checksum has changed')
+            self.log.error('(modified or corrupted file).')
+            self.log.error(
                 f'file_checksum={file_checksum},\ndb_checksum={db_checksum}'
             )
-            self.logger.info(
+            self.log.info(
                 'Use --reset-cache, check database integrity or try to restore the file'
             )
             # We d'ont want to silently ignore or correct this without
@@ -620,7 +616,6 @@ class Medias:
         db=None,
         interactive=False,
         ignore_tags=None,
-        logger=logging.getLogger(),
         use_date_filename=False,
         use_file_dates=False,
     ):
@@ -637,7 +632,7 @@ class Medias:
         self.album_from_folder = album_from_folder
         self.ignore_tags = ignore_tags
         self.interactive = interactive
-        self.logger = logger.getChild(self.__class__.__name__)
+        self.log = LOG.getChild(self.__class__.__name__)
         self.use_date_filename = use_date_filename
         self.use_file_dates = use_file_dates
 
@@ -653,7 +648,6 @@ class Medias:
             self.album_from_folder,
             self.ignore_tags,
             self.interactive,
-            self.logger,
             self.use_date_filename,
             self.use_file_dates,
         )
@@ -677,7 +671,7 @@ class Medias:
         for src_path in paths:
             if self.root not in src_path.parents:
                 if not imp:
-                    self.logger.error(f"""{src_path} not in {self.root}
+                    self.log.error(f"""{src_path} not in {self.root}
 collection, use `ordigi import`""")
                     sys.exit(1)
 
@@ -693,7 +687,6 @@ class Medias:
             file_path,
             metadata,
             ignore_tags=self.ignore_tags,
-            logger=self.logger
         )
 
         updated = False
@@ -1,5 +1,5 @@
 [pytest]
-addopts = --ignore=old_tests -s
+# addopts = --ignore=old_tests -s
 
 # collect_ignore = ["old_test"]
 
@@ -25,10 +25,7 @@ class TestOrdigi:
     def setup_class(cls, sample_files_paths):
         cls.runner = CliRunner()
         cls.src_path, cls.file_paths = sample_files_paths
-        cls.logger_options = (
-            '--debug',
-            '--verbose',
-        )
+        cls.logger_options = (('--verbose', 'DEBUG'),)
         cls.filter_options = (
             ('--exclude', '.DS_Store'),
             ('--ignore-tags', 'CreateDate'),
|
||||||
|
|
||||||
def assert_cli(self, command, attributes):
|
def assert_cli(self, command, attributes):
|
||||||
result = self.runner.invoke(command, [*attributes])
|
result = self.runner.invoke(command, [*attributes])
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0, attributes
|
||||||
|
|
||||||
def assert_options(self, command, bool_options, arg_options, paths):
|
def assert_options(self, command, bool_options, arg_options, paths):
|
||||||
for bool_option in bool_options:
|
for bool_option in bool_options:
|
||||||
|
@ -62,7 +59,6 @@ class TestOrdigi:
|
||||||
|
|
||||||
def test_sort(self):
|
def test_sort(self):
|
||||||
bool_options = (
|
bool_options = (
|
||||||
*self.logger_options,
|
|
||||||
# '--interactive',
|
# '--interactive',
|
||||||
'--dry-run',
|
'--dry-run',
|
||||||
'--album-from-folder',
|
'--album-from-folder',
|
||||||
|
@ -73,6 +69,7 @@ class TestOrdigi:
|
||||||
)
|
)
|
||||||
|
|
||||||
arg_options = (
|
arg_options = (
|
||||||
|
*self.logger_options,
|
||||||
*self.filter_options,
|
*self.filter_options,
|
||||||
('--path-format', '{%Y}/{folder}/{name}.{ext}'),
|
('--path-format', '{%Y}/{folder}/{name}.{ext}'),
|
||||||
|
|
||||||
|

@@ -86,32 +83,22 @@ class TestOrdigi:
         self.assert_all_options(cli._sort, bool_options, arg_options, paths)

     def assert_init(self):
-        for bool_option in self.logger_options:
-            result = self.runner.invoke(
-                cli._init, [bool_option, str(self.src_path
-            )])
-            assert result.exit_code == 0, bool_option
+        for opt, arg in self.logger_options:
+            self.assert_cli(cli._init, [opt, arg, str(self.src_path)])

     def assert_update(self):
         file_path = Path(ORDIGI_PATH, 'samples/test_exif/photo.cr2')
         dest_path = self.src_path / 'photo_moved.cr2'
         shutil.copyfile(file_path, dest_path)
-        for bool_option in self.logger_options:
-            result = self.runner.invoke(
-                cli._update, [bool_option, str(self.src_path
-            )])
-            assert result.exit_code == 0, bool_option
+        for opt, arg in self.logger_options:
+            self.assert_cli(cli._update, [opt, arg, str(self.src_path)])

     def assert_check(self):
-        for bool_option in self.logger_options:
-            result = self.runner.invoke(
-                cli._check, [bool_option, str(self.src_path
-            )])
-            assert result.exit_code == 0, bool_option
+        for opt, arg in self.logger_options:
+            self.assert_cli(cli._check, [opt, arg, str(self.src_path)])

     def assert_clean(self):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--delete-excluded',
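
assert_init, assert_update, and assert_check now share assert_cli, which both invokes the command and reports the failing arguments when the exit code is non-zero. Since the three bodies now differ only in the command object, they could be folded one step further; a possible follow-up sketch (not part of this commit), meant to live inside TestOrdigi:

def assert_command(self, command):
    # Hypothetical helper: run `command` once per logger option pair.
    for opt, arg in self.logger_options:
        self.assert_cli(command, [opt, arg, str(self.src_path)])

# assert_init / assert_update / assert_check would then reduce to
# one-liners, e.g. self.assert_command(cli._init).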

@@ -121,6 +108,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             ('--dedup-regex', r'\d{4}-\d{2}'),
         )

@@ -142,7 +130,6 @@ class TestOrdigi:

     def test_import(self, tmp_path):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--album-from-folder',

@@ -153,6 +140,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             ('--path-format', '{%Y}/{folder}/{stem}.{ext}'),


@@ -168,7 +156,6 @@ class TestOrdigi:

     def test_compare(self):
         bool_options = (
-            *self.logger_options,
             # '--interactive',
             '--dry-run',
             '--find-duplicates',

@@ -176,6 +163,7 @@ class TestOrdigi:
         )

         arg_options = (
+            *self.logger_options,
             *self.filter_options,
             # ('--similar-to', ''),
             ('--similarity', '65'),

@@ -6,16 +6,17 @@ import re
 import pytest
 import inquirer

+from ordigi import LOG
 from ordigi import constants
 from ordigi.collection import Collection, FPath, Paths
 from ordigi.exiftool import ExifToolCaching, exiftool_is_running, terminate_exiftool
 from ordigi.geolocation import GeoLocation
-from ordigi import log
 from ordigi.media import Media, ReadExif
 from ordigi import utils
 from .conftest import randomize_files, randomize_db
 from ordigi.summary import Summary

+LOG.setLevel(10)

 class TestFPath:

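
LOG.setLevel(10) sets the numeric value of logging.DEBUG; logging.DEBUG would be the more readable spelling. For context, a hypothetical sketch of the log.get_logger helper this commit leans on — only the name and the level keyword are taken from the diff, the body is assumed:

import logging
import sys


def get_logger(name='ordigi', level=logging.WARNING):
    """Return a shared, configured logger (sketch, not the real module)."""
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if not logger.handlers:  # avoid stacking handlers on repeated calls
        handler = logging.StreamHandler(sys.stderr)
        handler.setFormatter(
            logging.Formatter('%(levelname)s:%(name)s:%(message)s'))
        logger.addHandler(handler)
    return logger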

@@ -23,13 +24,12 @@ class TestFPath:
     def setup_class(cls, sample_files_paths):
         cls.src_path, cls.file_paths = sample_files_paths
         cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME
-        cls.logger = log.get_logger(level=10)

     def test_get_part(self, tmp_path):
         """
         Test all parts
         """
-        fpath = FPath(self.path_format, 4, self.logger)
+        fpath = FPath(self.path_format, 4)
         # Item to search for:
         items = fpath.get_items()
         masks = [

@@ -107,7 +107,7 @@ class TestFPath:

     def test_get_early_morning_photos_date(self):
         date = datetime(2021, 10, 16, 2, 20, 40)
-        fpath = FPath(self.path_format, 4, self.logger)
+        fpath = FPath(self.path_format, 4)
         part = fpath.get_early_morning_photos_date(date, '%Y-%m-%d')
         assert part == '2021-10-15'


@@ -121,7 +121,6 @@ class TestCollection:
     def setup_class(cls, sample_files_paths):
         cls.src_path, cls.file_paths = sample_files_paths
         cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME
-        cls.logger = log.get_logger(level=10)

     def teardown_class(self):
         terminate_exiftool()

@@ -138,8 +137,7 @@ class TestCollection:
         assert summary.success_table.sum('sort') == nb

     def test_sort_files(self, tmp_path):
-        collection = Collection(tmp_path, album_from_folder=True,
-                logger=self.logger)
+        collection = Collection(tmp_path, album_from_folder=True)
         loc = GeoLocation()
         summary = collection.sort_files([self.src_path],
             self.path_format, loc, imp='copy')

@@ -235,7 +233,7 @@ class TestCollection:
     def test_sort_similar_images(self, tmp_path):
         path = tmp_path / 'collection'
         shutil.copytree(self.src_path, path)
-        collection = Collection(path, logger=self.logger)
+        collection = Collection(path)
         loc = GeoLocation()
         summary = collection.init(loc)
         summary = collection.sort_similar_images(path, similarity=60)

@@ -247,7 +245,7 @@ class TestCollection:
     def test_fill_data(self, tmp_path, monkeypatch):
         path = tmp_path / 'collection'
         shutil.copytree(self.src_path, path)
-        collection = Collection(path, logger=self.logger)
+        collection = Collection(path)
         # loc = GeoLocation()

         # def mockreturn(prompt, theme):
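
The net effect on the test suite: verbosity is configured once through the shared LOG instead of being threaded into every Collection, FPath, and GeoLocation constructor. Usage sketch with an invented path:

import logging
from pathlib import Path

from ordigi import LOG
from ordigi.collection import Collection

LOG.setLevel(logging.DEBUG)  # same as LOG.setLevel(10) in the tests above
collection = Collection(Path('/tmp/collection'))  # no logger argument anymore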