Update import.py to handle both videos and photos; add geolocation support, application constants, and a local checksum database.
This commit is contained in:
parent
c9d52aaa59
commit
866e4dd6d3
|
@ -0,0 +1,4 @@
|
||||||
|
from os import path

# Directory where elodie keeps its application data (e.g. the hash database).
application_directory = '{}/.elodie'.format(path.expanduser('~'))

# JSON file mapping file checksums to the destination paths they were
# imported to; used to detect files that have already been processed.
hash_db = '{}/hash.json'.format(application_directory)
|
def reverse_lookup(lat, lon):
    """Reverse-geocode a latitude/longitude pair via the MapQuest Nominatim API.

    Reads the API key from the ``[MapQuest]`` section of the config.
    Returns the decoded JSON response as a dict, or None when the HTTP
    request fails or the response body is not valid JSON.
    """
    key = config.get('MapQuest', 'key')

    try:
        # Pass lat/lon as query parameters so requests URL-encodes them,
        # instead of interpolating raw values into the URL string.
        r = requests.get(
            'https://open.mapquestapi.com/nominatim/v1/reverse.php',
            params={'key': key, 'lat': lat, 'lon': lon, 'format': 'json'}
        )
        return r.json()
    except requests.exceptions.RequestException as e:
        print(e)
        return None
    except ValueError as e:
        # The response body was not JSON; dump it to aid debugging.
        print(r.text)
        print(e)
        return None
||||||
def place_name(lat, lon):
|
def place_name(lat, lon):
|
||||||
geolocation_info = reverse_lookup(lat, lon)
|
geolocation_info = reverse_lookup(lat, lon)
|
||||||
|
|
|
@ -0,0 +1,50 @@
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
|
||||||
|
from elodie import constants
|
||||||
|
|
||||||
|
class Db(object):
    """Persistent store of imported-file checksums, backed by a JSON file.

    Keys are SHA-256 checksums and values are the destination paths files
    were imported to; used to skip files that were already imported.
    """

    def __init__(self):
        # Verify that the application directory (~/.elodie) exists, else create it.
        if not os.path.exists(constants.application_directory):
            os.makedirs(constants.application_directory)

        self.hash_db = {}
        # Only read the database when the file already exists. The previous
        # mode 'rw' is not a valid open() mode and raises ValueError, and
        # 'w' would truncate an existing database.
        if os.path.isfile(constants.hash_db):
            with open(constants.hash_db, 'r') as f:
                try:
                    self.hash_db = json.load(f)
                except ValueError:
                    # Corrupt or empty JSON file; start with an empty database.
                    pass

    def add_hash(self, key, value, write=False):
        """Record a checksum -> destination path mapping.

        Persists the database to disk immediately when write is True.
        """
        self.hash_db[key] = value
        if write:
            self.update_hash_db()

    def check_hash(self, key):
        """Return True if the checksum is already in the database."""
        return key in self.hash_db

    def get_hash(self, key):
        """Return the stored destination path for a checksum, or None."""
        if self.check_hash(key):
            return self.hash_db[key]
        return None

    def update_hash_db(self):
        """Write the in-memory database out to the JSON file."""
        with open(constants.hash_db, 'w') as f:
            json.dump(self.hash_db, f)

    def checksum(self, file_path, blocksize=65536):
        """Return the SHA-256 hex digest of a file, read in blocksize chunks.

        http://stackoverflow.com/a/3431835/1318758
        """
        hasher = hashlib.sha256()
        # Read in binary mode so the digest is byte-accurate for media files
        # on every platform (text mode would corrupt binary data).
        with open(file_path, 'rb') as f:
            buf = f.read(blocksize)
            while len(buf) > 0:
                hasher.update(buf)
                buf = f.read(blocksize)
            return hasher.hexdigest()
|
|
@ -51,12 +51,15 @@ class Photo(Media):
|
||||||
# EXIF DateTime is already stored as a timestamp
|
# EXIF DateTime is already stored as a timestamp
|
||||||
# Sourced from https://github.com/photo/frontend/blob/master/src/libraries/models/Photo.php#L500
|
# Sourced from https://github.com/photo/frontend/blob/master/src/libraries/models/Photo.php#L500
|
||||||
exif = self.get_exif()
|
exif = self.get_exif()
|
||||||
if('EXIF DateTimeOriginal' in exif):
|
try:
|
||||||
seconds_since_epoch = time.mktime(datetime.strptime(str(exif['EXIF DateTimeOriginal']), '%Y:%m:%d %H:%M:%S').timetuple())
|
if('EXIF DateTimeOriginal' in exif):
|
||||||
elif('EXIF DateTime' in exif):
|
seconds_since_epoch = time.mktime(datetime.strptime(str(exif['EXIF DateTimeOriginal']), '%Y:%m:%d %H:%M:%S').timetuple())
|
||||||
seconds_since_epoch = time.mktime(datetime.strptime(str(exif['EXIF DateTime']), '%Y:%m:%d %H:%M:%S').timetuple())
|
elif('EXIF DateTime' in exif):
|
||||||
elif('EXIF FileDateTime' in exif):
|
seconds_since_epoch = time.mktime(datetime.strptime(str(exif['EXIF DateTime']), '%Y:%m:%d %H:%M:%S').timetuple())
|
||||||
seconds_since_epoch = str(exif['EXIF DateTime'])
|
elif('EXIF FileDateTime' in exif):
|
||||||
|
seconds_since_epoch = str(exif['EXIF DateTime'])
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if(seconds_since_epoch == 0):
|
if(seconds_since_epoch == 0):
|
||||||
return None
|
return None
|
||||||
|
|
50
import.py
50
import.py
|
@ -5,35 +5,67 @@ import shutil
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from elodie import arguments
|
from elodie import arguments
|
||||||
|
from elodie.media.photo import Photo
|
||||||
from elodie.media.video import Video
|
from elodie.media.video import Video
|
||||||
from elodie.filesystem import FileSystem
|
from elodie.filesystem import FileSystem
|
||||||
|
from elodie.localstorage import Db
|
||||||
|
|
||||||
def main(argv):
    """Import photos or videos from a source directory into a destination
    directory organized by date and location, skipping files whose checksum
    has already been imported.

    Expected arguments:
        --type=<photo or video> --source=<dir> --destination=<dir>
    """
    args = arguments.parse(argv, None, ['type=', 'source=', 'destination='], './import.py --type=<photo or video> --source=<source directory> -destination=<destination directory>')
    db = Db()

    source = args['source']
    destination = args['destination']

    filesystem = FileSystem()
    # Select the media class for the requested type; anything other than
    # 'photo' is treated as video.
    if(args['type'] == 'photo'):
        media_type = Photo
    else:
        media_type = Video

    write_counter = 0
    for current_file in filesystem.get_all_files(source, media_type.get_valid_extensions()):
        checksum = db.checksum(current_file)
        if(checksum == None):
            print('Could not get checksum for %s. Skipping...' % current_file)
            continue

        # Skip files whose checksum we have already imported.
        if(db.check_hash(checksum) == True):
            print('%s already exists at %s. Skipping...' % (current_file, db.get_hash(checksum)))
            continue

        media = media_type(current_file)

        # Videos may carry their date in the file path rather than metadata.
        if(media_type.__name__ == 'Video'):
            filesystem.set_date_from_path_video(media)

        metadata = media.get_metadata()

        directory_name = filesystem.get_folder_path(date=metadata['date_taken'], latitude=metadata['latitude'], longitude=metadata['longitude'])
        dest_directory = '%s/%s' % (destination, directory_name)
        # TODO remove the day prefix of the file that was there prior to the crawl
        file_name = filesystem.get_file_name(media)
        dest_path = '%s/%s' % (dest_directory, file_name)

        filesystem.create_directory(dest_directory)

        print('%s -> %s' % (current_file, dest_path))
        shutil.copy2(current_file, dest_path)
        #shutil.move(current_file, dest_path)
        db.add_hash(checksum, dest_path)

        # Persist the hash database every 10 files so progress survives a crash.
        write_counter += 1
        if(write_counter % 10 == 0):
            db.update_hash_db()

    # Flush anything not yet written to the hash database. The previous
    # condition (write_counter % 10 != 10) was always true since x % 10 can
    # never equal 10; != 0 is the intended "unflushed remainder" check.
    if(write_counter % 10 != 0):
        db.update_hash_db()

if __name__ == '__main__':
    main(sys.argv[1:])
|
||||||
|
|
Loading…
Reference in New Issue