Change hash_db and location_db storage location to target directory
parent 21ed551a54, commit 04f2ac6738
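In short: hash.json and location.json no longer live in the global ~/.elodie/ directory; each target library gets its own .elodie/ directory, and the Db handle is created once by the caller and passed down instead of being constructed inside every helper. A minimal sketch of the new layout (the destination path is an example, not part of the commit):

    from elodie.localstorage import Db

    destination = '/photos/library'   # example import target
    db = Db(destination)              # creates /photos/library/.elodie/ on first use
    # hash db:     /photos/library/.elodie/hash.json
    # location db: /photos/library/.elodie/location.json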
elodie.py

@@ -21,7 +21,7 @@ from elodie.compatability import _decode
 from elodie.config import load_config
 from elodie.filesystem import FileSystem
 from elodie.localstorage import Db
 from elodie.media.base import Base, get_all_subclasses
-from elodie.media.media import Media, get_all_subclasses
+from elodie.media.media import Media
 from elodie.media.audio import Audio
 from elodie.media.photo import Photo
@@ -34,7 +34,7 @@ from elodie import constants
 
 FILESYSTEM = FileSystem()
 
 
-def import_file(_file, destination, album_from_folder, trash, allow_duplicates):
+def import_file(_file, destination, db, album_from_folder, trash, allow_duplicates):
     """Set file metadata and move it to destination.
     """
@@ -56,7 +56,7 @@ def import_file(_file, destination, album_from_folder, trash, allow_duplicates):
         log.all('{"source":"%s", "error_msg":"Not a supported file"}' % _file)
         return
 
-    dest_path = FILESYSTEM.process_file(_file, destination,
+    dest_path = FILESYSTEM.process_file(_file, destination, db,
         media, album_from_folder, allowDuplicate=allow_duplicates, move=False)
     if dest_path:
         log.all('%s -> %s' % (_file, dest_path))
@@ -74,7 +74,7 @@ def _batch(debug):
     constants.debug = debug
     plugins = Plugins()
     plugins.run_batch()
 
 
 @click.command('import')
 @click.option('--destination', type=click.Path(file_okay=False),
@@ -128,11 +128,18 @@ def _import(destination, source, file, album_from_folder, trash, allow_duplicate
             if not FILESYSTEM.should_exclude(path, exclude_regex_list, True):
                 files.add(path)
 
-    for current_file in files:
-        dest_path = import_file(current_file, destination, album_from_folder,
-            trash, allow_duplicates)
-        result.append((current_file, dest_path))
-        has_errors = has_errors is True or not dest_path
+    # Initialize Db
+    if os.path.exists(destination):
+        db = Db(destination)
+
+        for current_file in files:
+            dest_path = import_file(current_file, destination, db,
+                album_from_folder, trash, allow_duplicates)
+            result.append((current_file, dest_path))
+            has_errors = has_errors is True or not dest_path
+    else:
+        result.append((destination, False))
+        has_errors = True
 
     result.write()
@@ -144,7 +151,7 @@ def _import(destination, source, file, album_from_folder, trash, allow_duplicate
               required=True, help='Source of your photo library.')
 @click.option('--debug', default=False, is_flag=True,
               help='Override the value in constants.py with True.')
-def _generate_db(source, debug):
+def _generate_db(path, debug):
     """Regenerate the hash.json database which contains all of the sha256 signatures of media files. The hash.json file is located at ~/.elodie/.
     """
     constants.debug = debug
@@ -154,8 +161,8 @@ def _generate_db(source, debug):
     if not os.path.isdir(source):
         log.error('Source is not a valid directory %s' % source)
         sys.exit(1)
 
-    db = Db()
+    db = Db(path)
     db.backup_hash_db()
     db.reset_hash_db()
 
@@ -174,7 +181,7 @@ def _generate_db(source, debug):
 def _verify(debug):
     constants.debug = debug
     result = Result()
-    db = Db()
+    db = Db(path)
     for checksum, file_path in db.all():
         if not os.path.isfile(file_path):
             result.append((file_path, False))
@@ -193,10 +200,10 @@ def _verify(debug):
     result.write()
 
 
-def update_location(media, file_path, location_name):
+def update_location(media, file_path, location_name, db):
     """Update location exif metadata of media.
     """
-    location_coords = geolocation.coordinates_by_name(location_name)
+    location_coords = geolocation.coordinates_by_name(location_name, db)
 
     if location_coords and 'latitude' in location_coords and \
             'longitude' in location_coords:
@@ -223,7 +230,7 @@ def update_time(media, file_path, time_string):
         sys.exit(1)
 
     time = datetime.strptime(time_string, time_format)
-    media.set_date_original(time)
+    media.set_date_original(time, file_path)
     return True
 
 
@@ -277,13 +284,16 @@ def _update(album, location, time, title, paths, debug):
             ).split(os.sep)[:destination_depth]
         )
 
+        # Initialize Db
+        db = Db(destination)
+
         media = Media.get_class_by_file(current_file, get_all_subclasses())
         if not media:
             continue
 
         updated = False
         if location:
-            update_location(media, current_file, location)
+            update_location(media, current_file, location, db)
             updated = True
         if time:
             update_time(media, current_file, time)
@@ -325,7 +335,7 @@ def _update(album, location, time, title, paths, debug):
             updated_media.set_metadata_basename(
                 original_base_name.replace('-%s' % original_title, ''))
 
-            dest_path = FILESYSTEM.process_file(current_file, destination,
+            dest_path = FILESYSTEM.process_file(current_file, destination, db,
                 updated_media, False, move=True, allowDuplicate=True)
             log.info(u'%s -> %s' % (current_file, dest_path))
             log.all('{"source":"%s", "destination":"%s"}' % (current_file,
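The import command now only builds the database when the destination directory exists, then threads it through import_file(); a hedged sketch of that flow with example values:

    import os
    from elodie.localstorage import Db

    destination = '/photos/library'   # example --destination value
    if os.path.exists(destination):
        db = Db(destination)
        # each file is imported with the shared handle, e.g.
        # import_file(current_file, destination, db,
        #             album_from_folder, trash, allow_duplicates)
    else:
        print('%s does not exist, nothing imported' % destination)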
elodie/constants.py

@@ -17,10 +17,14 @@ if (
     application_directory = environ['ELODIE_APPLICATION_DIRECTORY']
 
 #: File in which to store details about media Elodie has seen.
-hash_db = '{}/hash.json'.format(application_directory)
+hash_db = 'hash.json'
+# TODO will be removed eventualy later
+# hash_db = '{}/hash.json'.format(application_directory)
 
 #: File in which to store geolocation details about media Elodie has seen.
-location_db = '{}/location.json'.format(application_directory)
+location_db = 'location.json'
+# TODO will be removed eventualy later
+# location_db = '{}/location.json'.format(application_directory)
 
 #: Elodie installation directory.
 script_directory = path.dirname(path.dirname(path.abspath(__file__)))
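constants.hash_db and constants.location_db are now bare file names; Db joins them onto a per-target .elodie/ directory (this mirrors the localstorage.py hunk further down; the target path is an example):

    import os
    from elodie import constants

    target_dir = '/photos/library'    # example
    dirname = os.path.join(target_dir, '.elodie')
    hash_db_file = os.path.join(dirname, constants.hash_db)          # .../.elodie/hash.json
    location_db_file = os.path.join(dirname, constants.location_db)  # .../.elodie/location.json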
elodie/filesystem.py

@@ -163,7 +163,8 @@ class FileSystem(object):
         elif part in ('location', 'city', 'state', 'country'):
             place_name = geolocation.place_name(
                 metadata['latitude'],
-                metadata['longitude']
+                metadata['longitude'],
+                db
             )
 
             location_parts = re.findall('(%[^%]+)', mask)
@@ -348,7 +349,7 @@ class FileSystem(object):
 
         return self.cached_folder_path_definition
 
-    def get_folder_path(self, metadata, path_parts=None):
+    def get_folder_path(self, metadata, db, path_parts=None):
         """Given a media's metadata this function returns the folder path as a string.
 
         :param dict metadata: Metadata dictionary.
@@ -366,7 +367,7 @@ class FileSystem(object):
             # Unknown Location - when neither an album nor location exist
             for this_part in path_part:
                 part, mask = this_part
-                this_path = self.get_dynamic_path(part, mask, metadata)
+                this_path = self.get_dynamic_path(part, mask, metadata, db)
                 if this_path:
                     path.append(this_path.strip())
                     # We break as soon as we have a value to append
@@ -466,7 +467,7 @@ class FileSystem(object):
         elif metadata['date_modified'] is not None:
             return metadata['date_modified']
 
-    def get_dynamic_path(self, part, mask, metadata):
+    def get_dynamic_path(self, part, mask, metadata, db):
         """Parse a specific folder's name given a mask and metadata.
 
         :param part: Name of the part as defined in the path (i.e. date from %date)
@@ -494,7 +495,7 @@ class FileSystem(object):
             for i in custom_parts:
                 folder = folder.replace(
                     i,
-                    self.get_dynamic_path(i[1:], i, metadata)
+                    self.get_dynamic_path(i[1:], i, metadata, db)
                 )
             return folder
         elif part in ('date', 'day', 'month', 'year'):
@@ -506,7 +507,8 @@ class FileSystem(object):
         elif part in ('location', 'city', 'state', 'country'):
             place_name = geolocation.place_name(
                 metadata['latitude'],
-                metadata['longitude']
+                metadata['longitude'],
+                db
             )
 
             location_parts = re.findall('(%[^%]+)', mask)
@@ -587,8 +589,7 @@ class FileSystem(object):
 
         return folder_name
 
-    def process_checksum(self, _file, allow_duplicate):
-        db = Db()
+    def process_checksum(self, _file, db, allow_duplicate):
         checksum = db.checksum(_file)
         if(checksum is None):
             log.info('Could not get checksum for %s.' % _file)
@@ -614,7 +615,7 @@ class FileSystem(object):
             ))
             return checksum
 
-    def process_file(self, _file, destination, media, album_from_folder, **kwargs):
+    def process_file(self, _file, destination, db, media, album_from_folder, **kwargs):
         move = False
         if('move' in kwargs):
             if kwargs['move']:
@@ -633,7 +634,7 @@ class FileSystem(object):
             print('%s is not a valid media file. Skipping...' % _file)
             return
 
-        checksum = self.process_checksum(_file, allow_duplicate)
+        checksum = self.process_checksum(_file, db, allow_duplicate)
         if(checksum is None):
             log.info('Original checksum returned None for %s. Skipping...' %
                      _file)
@@ -646,7 +647,7 @@ class FileSystem(object):
             log.warn('At least one plugin pre-run failed for %s' % _file)
             return
 
-        directory_name = self.get_folder_path(metadata)
+        directory_name = self.get_folder_path(metadata, db)
         dest_directory = os.path.join(destination, directory_name)
         file_name = self.get_file_name(metadata)
         dest_path = os.path.join(dest_directory, file_name)
@@ -684,7 +685,6 @@ class FileSystem(object):
         if album_from_folder:
             media.set_album_from_folder(dest_path)
 
-        db = Db()
         db.add_hash(checksum, dest_path)
         db.update_hash_db()
 
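FileSystem no longer builds its own Db: process_file() receives one and hands it down through get_folder_path()/get_dynamic_path() to geolocation.place_name(). A rough usage sketch under assumed paths (the Photo source file and the flag values are illustrative):

    from elodie.filesystem import FileSystem
    from elodie.localstorage import Db
    from elodie.media.photo import Photo

    destination = '/photos/library'               # example target
    db = Db(destination)
    filesystem = FileSystem()
    media = Photo('/photos/inbox/IMG_0001.jpg')   # example source file
    dest_path = filesystem.process_file('/photos/inbox/IMG_0001.jpg', destination, db,
                                        media, False, allowDuplicate=False, move=False)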
elodie/geolocation.py

@@ -14,16 +14,14 @@ from geopy.geocoders import Nominatim
 from elodie.config import load_config
 from elodie import constants
 from elodie import log
-from elodie.localstorage import Db
 
 __KEY__ = None
 __DEFAULT_LOCATION__ = 'Unknown Location'
 __PREFER_ENGLISH_NAMES__ = None
 
 
-def coordinates_by_name(name):
+def coordinates_by_name(name, db):
     # Try to get cached location first
-    db = Db()
     cached_coordinates = db.get_location_coordinates(name)
     if(cached_coordinates is not None):
         return {
@@ -149,7 +147,7 @@ def get_prefer_english_names():
     __PREFER_ENGLISH_NAMES__ = bool(config['Geolocation']['prefer_english_names'])
     return __PREFER_ENGLISH_NAMES__
 
-def place_name(lat, lon):
+def place_name(lat, lon, db):
     lookup_place_name_default = {'default': __DEFAULT_LOCATION__}
     if(lat is None or lon is None):
         return lookup_place_name_default
@@ -161,7 +159,6 @@ def place_name(lat, lon):
     lon = float(lon)
 
     # Try to get cached location first
-    db = Db()
     # 3km distace radious for a match
     cached_place_name = db.get_location_name(lat, lon, 3000)
     # We check that it's a dict to coerce an upgrade of the location
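coordinates_by_name() and place_name() now take the caller's Db instead of instantiating one; for example (the coordinates and place name are illustrative, and an uncached name may still trigger a geocoder lookup):

    from elodie import geolocation
    from elodie.localstorage import Db

    db = Db('/photos/library')   # example target directory
    place = geolocation.place_name(37.3667027, -122.0333836, db)
    coords = geolocation.coordinates_by_name('Sunnyvale, CA', db)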
elodie/localstorage.py

@@ -20,44 +20,59 @@ class Db(object):
 
     """A class for interacting with the JSON files created by Elodie."""
 
-    def __init__(self):
+    def __init__(self, target_dir):
         # verify that the application directory (~/.elodie) exists,
         # else create it
-        if not os.path.exists(constants.application_directory):
-            os.makedirs(constants.application_directory)
+        # if not os.path.exists(constants.application_directory):
+        #     os.makedirs(constants.application_directory)
 
-        # If the hash db doesn't exist we create it.
-        # Otherwise we only open for reading
-        if not os.path.isfile(constants.hash_db):
-            with open(constants.hash_db, 'a'):
-                os.utime(constants.hash_db, None)
+        # Create dir for target database
+        dirname = os.path.join(target_dir, '.elodie')
+        # Legacy dir
+        # dirname = constants.application_directory
+
+        if not os.path.exists(dirname):
+            try:
+                os.makedirs(dirname)
+            except OSError:
+                pass
+
+        # self.hash_db = constants.hash_db
+        self.hash_db_file = os.path.join(dirname, constants.hash_db)
+        self.check_db(self.hash_db_file)
 
         self.hash_db = {}
 
         # We know from above that this file exists so we open it
         # for reading only.
-        with open(constants.hash_db, 'r') as f:
+        with open(self.hash_db_file, 'r') as f:
             try:
                 self.hash_db = json.load(f)
             except ValueError:
                 pass
 
-        # If the location db doesn't exist we create it.
-        # Otherwise we only open for reading
-        if not os.path.isfile(constants.location_db):
-            with open(constants.location_db, 'a'):
-                os.utime(constants.location_db, None)
+        # self.location_db_file = constants.location_db
+        self.location_db_file = os.path.join(dirname, constants.location_db)
+        self.check_db(self.location_db_file)
 
         self.location_db = []
 
         # We know from above that this file exists so we open it
         # for reading only.
-        with open(constants.location_db, 'r') as f:
+        with open(self.location_db_file, 'r') as f:
             try:
                 self.location_db = json.load(f)
             except ValueError:
                 pass
 
+    def check_db(self, db_file):
+        '''Load db from file'''
+        # If the hash db doesn't exist we create it.
+        # Otherwise we only open for reading
+        if not os.path.isfile(db_file):
+            with open(db_file, 'a'):
+                os.utime(db_file, None)
+
     def add_hash(self, key, value, write=False):
         """Add a hash to the hash db.
 
@@ -95,10 +110,11 @@ class Db(object):
 
     def backup_hash_db(self):
         """Backs up the hash db."""
-        if os.path.isfile(constants.hash_db):
+        # TODO
+        if os.path.isfile(self.hash_db_file):
             mask = strftime('%Y-%m-%d_%H-%M-%S')
-            backup_file_name = '%s-%s' % (constants.hash_db, mask)
-            copyfile(constants.hash_db, backup_file_name)
+            backup_file_name = '%s-%s' % (self.hash_db_file, mask)
+            copyfile(self.hash_db_file, backup_file_name)
             return backup_file_name
 
     def check_hash(self, key):
@@ -196,10 +212,10 @@ class Db(object):
 
     def update_hash_db(self):
         """Write the hash db to disk."""
-        with open(constants.hash_db, 'w') as f:
+        with open(self.hash_db_file, 'w') as f:
             json.dump(self.hash_db, f)
 
     def update_location_db(self):
         """Write the location db to disk."""
-        with open(constants.location_db, 'w') as f:
+        with open(self.location_db_file, 'w') as f:
             json.dump(self.location_db, f)
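Db now takes the target directory, creates <target>/.elodie/ on demand, and reads and writes hash.json and location.json there via the new check_db() helper. A short usage sketch (the paths and the checksum value are examples):

    from elodie.localstorage import Db

    db = Db('/photos/library')                  # databases live in /photos/library/.elodie/
    db.add_hash('abc123', '/photos/library/2019-05-May/IMG_0001.jpg')   # example checksum/path
    db.update_hash_db()                         # writes .elodie/hash.json
    backup = db.backup_hash_db()                # timestamped copy next to it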
elodie/tests/geolocation_test.py

@@ -131,7 +131,8 @@ def test_place_name_deprecated_string_cached():
 [{"lat": 37.3667027222222, "long": -122.033383611111, "name": "OLDVALUE"}]
 """
     )
-    place_name = geolocation.place_name(37.3667027222222, -122.033383611111)
+    place_name = geolocation.place_name(37.3667027222222, -122.033383611111,
+        db)
     helper.restore_dbs()
 
     assert place_name['city'] == 'Sunnyvale', place_name
@@ -144,7 +145,8 @@ def test_place_name_cached():
 [{"lat": 37.3667027222222, "long": -122.033383611111, "name": {"city": "UNITTEST"}}]
 """
     )
-    place_name = geolocation.place_name(37.3667027222222, -122.033383611111)
+    place_name = geolocation.place_name(37.3667027222222, -122.033383611111,
+        db)
     helper.restore_dbs()
 
     assert place_name['city'] == 'UNITTEST', place_name
@@ -152,7 +154,7 @@ def test_place_name_cached():
 def test_place_name_no_default():
     # See gh-160 for backwards compatability needed when a string is stored instead of a dict
     helper.reset_dbs()
-    place_name = geolocation.place_name(123456.000, 123456.000)
+    place_name = geolocation.place_name(123456.000, 123456.000, db)
     helper.restore_dbs()
 
     assert place_name['default'] == 'Unknown Location', place_name
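The geolocation tests now pass a db into place_name(), but the hunks above do not show where that handle is defined; presumably each test builds one against a temporary library first, roughly like this (the setup is an assumption, not part of the commit):

    import tempfile
    from elodie.localstorage import Db

    db = Db(tempfile.mkdtemp())   # throwaway target directory for the test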