# TODO to be removed later
from datetime import datetime
import os
import re
import shutil
import sqlite3
from pathlib import Path
from sys import platform
from time import sleep

import pytest

from .conftest import randomize_files, randomize_db
from ordigi import constants
from ordigi.collection import Collection
from ordigi.database import Sqlite
from ordigi.exiftool import ExifToolCaching, exiftool_is_running, terminate_exiftool
from ordigi.geolocation import GeoLocation
from ordigi import log
from ordigi.media import Media
from ordigi import utils


class TestCollection:
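    """Tests for ordigi.collection.Collection, run against the sample file fixtures."""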

    @pytest.fixture(autouse=True)
    def setup_class(cls, sample_files_paths):
        cls.src_path, cls.file_paths = sample_files_paths
        cls.path_format = constants.default_path + '/' + constants.default_name
        cls.logger = log.get_logger(level=10)

    def teardown_class(self):
        terminate_exiftool()
        assert not exiftool_is_running()

    def test_get_part(self, tmp_path):
        """
        Check that get_part() returns the expected string for every supported mask.
        """
        # Items to search for in the masks:
        collection = Collection(tmp_path, self.path_format,
                                use_date_filename=True, use_file_dates=True)
        items = collection.get_items()
        masks = [
            '{album}',
            '{basename}',
            '{camera_make}',
            '{camera_model}',
            '{city}',
            '{"custom"}',
            '{country}',
            '{ext}',
            '{folder}',
            '{folders[1:3]}',
            '{location}',
            '{name}',
            '{original_name}',
            '{state}',
            '{title}',
            '{%Y-%m-%d}',
            '{%Y-%m-%d_%H-%M-%S}',
            '{%Y-%m-%b}',
        ]

        for file_path in self.file_paths:
            media = Media(file_path, self.src_path, use_date_filename=True,
                          use_file_dates=True)
            subdirs = file_path.relative_to(self.src_path).parent
            # Exiftool keys corresponding to each supported item
            exif_tags = {}
            for key in ('album', 'camera_make', 'camera_model', 'latitude',
                        'longitude', 'original_name', 'title'):
                exif_tags[key] = media.tags_keys[key]

            exif_data = ExifToolCaching(str(file_path)).asdict()
            loc = GeoLocation()
            metadata = media.get_metadata(loc)
            for item, regex in items.items():
                for mask in masks:
                    matched = re.search(regex, mask)
                    if matched:
                        part = collection.get_part(item, mask[1:-1], metadata)
                        # Check that the part is correct
                        assert isinstance(part, str), file_path
                        if item == 'basename':
                            assert part == file_path.stem, file_path
                        elif item == 'date':
                            if part == '':
                                media.get_date_media()
                            assert datetime.strptime(part, mask[1:-1])
                        elif item == 'folder':
                            assert part == subdirs.name, file_path
                        elif item == 'folders':
                            assert part in str(subdirs)
                        elif item == 'ext':
                            assert part == file_path.suffix[1:], file_path
                        elif item == 'name':
                            expected_part = file_path.stem
                            for i, rx in utils.get_date_regex(expected_part):
                                part = re.sub(rx, '', expected_part)
                            assert part == expected_part, file_path
                        elif item == 'custom':
                            assert part == mask[2:-2], file_path
                        elif item in ('city', 'country', 'location', 'state'):
                            pass
                        elif item in exif_tags:
                            found = False
                            for key in exif_tags[item]:
                                if key in exif_data:
                                    found = True
                                    assert part == exif_data[key], file_path
                                    break
                            if not found:
                                assert part == '', file_path
                        else:
                            assert part == '', file_path

    def test_sort_files(self, tmp_path):
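        """Sort the sample files into a fresh collection and check the result."""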
        collection = Collection(tmp_path, self.path_format,
                                album_from_folder=True, logger=self.logger)
        loc = GeoLocation()
        summary, result = collection.sort_files([self.src_path], loc)

        # Summary is created and there are no errors
        assert summary, summary
        assert result, result

        summary, result = collection.check_files()
        assert summary, summary
        assert result, result

        # The album tag should have been set from the parent folder
        for file_path in tmp_path.glob('**/*'):
            if '.db' not in str(file_path):
                media = Media(file_path, tmp_path, album_from_folder=True)
                media.get_exif_metadata()
                for value in media._get_key_values('album'):
                    assert value not in ('', None)

        # Test with a populated destination directory
        randomize_files(tmp_path)
        summary, result = collection.check_files()
        assert summary, summary
        assert not result, result

        collection = Collection(tmp_path, None, mode='move', logger=self.logger)
        summary = collection.update(loc)
        assert summary, summary

        collection = Collection(tmp_path, self.path_format, album_from_folder=True)
        loc = GeoLocation()
        summary, result = collection.sort_files([self.src_path], loc)

        assert summary, summary
        assert result, result

        # TODO check if path follows path_format

    def test_sort_files_invalid_db(self, tmp_path):
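        """Sorting with a randomized (invalid) database should raise sqlite3.DatabaseError."""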
        collection = Collection(tmp_path, self.path_format)
        loc = GeoLocation()
        randomize_db(tmp_path)
        with pytest.raises(sqlite3.DatabaseError):
            summary, result = collection.sort_files([self.src_path], loc)

    def test_sort_file(self, tmp_path):
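        """sort_file() should copy or move a single file without altering its content."""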
        for mode in 'copy', 'move':
            collection = Collection(tmp_path, self.path_format, mode=mode)
            src_path = Path(self.src_path, 'test_exif', 'photo.png')
            name = 'photo_' + mode + '.png'
            dest_path = Path(tmp_path, name)
            src_checksum = utils.checksum(src_path)
            result_copy = collection.sort_file(src_path, dest_path)
            assert result_copy
            # Ensure the file content remains the same
            assert collection._checkcomp(dest_path, src_checksum)

            if mode == 'copy':
                assert src_path.exists()
            else:
                assert not src_path.exists()
                # Restore the source file for the next iteration
                shutil.copyfile(dest_path, src_path)

        # TODO check for conflicts

        # TODO check date

    def test__get_files_in_path(self, tmp_path):
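        """_get_files_in_path() should honour the exclude, maxlevel and glob filters."""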
        collection = Collection(tmp_path, self.path_format,
                                exclude={'**/*.dng'},
                                use_date_filename=True, use_file_dates=True)
        paths = list(collection._get_files_in_path(self.src_path,
                                                   maxlevel=1, glob='**/photo*'))
        assert len(paths) == 6
        for path in paths:
            assert isinstance(path, Path)

    def test_sort_similar_images(self, tmp_path):
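        """Group similar images with sort_similar_images() and then revert the grouping."""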
        path = tmp_path / 'collection'
        shutil.copytree(self.src_path, path)
        collection = Collection(path, None, mode='move', logger=self.logger)
        loc = GeoLocation()
        summary = collection.init(loc)
        summary, result = collection.sort_similar_images(path, similarity=60)

        # Summary is created and there are no errors
        assert summary, summary
        assert result, result

        summary, result = collection.revert_compare(path)

        # Summary is created and there are no errors
        assert summary, summary
        assert result, result