Add support for --file to import.py and return exit code
This commit is contained in:
parent
866e4dd6d3
commit
af38851e23
97
import.py
97
import.py
|
@ -10,62 +10,71 @@ from elodie.media.video import Video
|
||||||
from elodie.filesystem import FileSystem
|
from elodie.filesystem import FileSystem
|
||||||
from elodie.localstorage import Db
|
from elodie.localstorage import Db
|
||||||
|
|
||||||
|
# Module-level singletons shared by process_file() and main():
# `db` maps file checksums to their imported destination paths;
# `filesystem` builds folder/file names and crawls source directories.
db = Db()

filesystem = FileSystem()
|
def process_file(_file):
|
||||||
|
checksum = db.checksum(_file)
|
||||||
|
if(checksum == None):
|
||||||
|
print 'Could not get checksum for %s. Skipping...' % _file
|
||||||
|
return
|
||||||
|
|
||||||
|
if(db.check_hash(checksum) == True):
|
||||||
|
print '%s already exists at %s. Skipping...' % (_file, db.get_hash(checksum))
|
||||||
|
return
|
||||||
|
|
||||||
|
media = media_type(_file)
|
||||||
|
|
||||||
|
if(media_type.__name__ == 'Video'):
|
||||||
|
filesystem.set_date_from_path_video(media)
|
||||||
|
|
||||||
|
metadata = media.get_metadata()
|
||||||
|
|
||||||
|
directory_name = filesystem.get_folder_path(date=metadata['date_taken'], latitude=metadata['latitude'], longitude=metadata['longitude'])
|
||||||
|
#directory_name = filesystem.get_folder_path(date=metadata['date_taken'])
|
||||||
|
dest_directory = '%s/%s' % (destination, directory_name)
|
||||||
|
# TODO remove the day prefix of the file that was there prior to the crawl
|
||||||
|
file_name = filesystem.get_file_name(media)
|
||||||
|
dest_path = '%s/%s' % (dest_directory, file_name)
|
||||||
|
|
||||||
|
filesystem.create_directory(dest_directory)
|
||||||
|
|
||||||
|
print '%s -> %s' % (_file, dest_path)
|
||||||
|
shutil.copy2(_file, dest_path)
|
||||||
|
#shutil.move(_file, dest_path)
|
||||||
|
db.add_hash(checksum, dest_path)
|
||||||
|
|
||||||
def main(argv):
|
def main(argv):
|
||||||
args = arguments.parse(argv, None, ['type=','source=','destination='], './import.py --type=<photo or video> --source=<source directory> -destination=<destination directory>')
|
args = arguments.parse(argv, None, ['file=','type=','source=','destination='], './import.py --type=<photo or video> --source=<source directory> -destination=<destination directory>')
|
||||||
|
|
||||||
db = Db()
|
if('destination' not in args):
|
||||||
|
print 'No destination passed in'
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
source = args['source']
|
|
||||||
destination = args['destination']
|
destination = args['destination']
|
||||||
|
if('type' in args and args['type'] == 'photo'):
|
||||||
filesystem = FileSystem()
|
|
||||||
if(args['type'] == 'photo'):
|
|
||||||
media_type = Photo
|
media_type = Photo
|
||||||
else:
|
else:
|
||||||
media_type = Video
|
media_type = Video
|
||||||
|
|
||||||
write_counter = 0
|
if('source' in args):
|
||||||
for current_file in filesystem.get_all_files(source, media_type.get_valid_extensions()):
|
source = args['source']
|
||||||
checksum = db.checksum(current_file)
|
|
||||||
if(checksum == None):
|
|
||||||
print 'Could not get checksum for %s. Skipping...' % current_file
|
|
||||||
continue
|
|
||||||
|
|
||||||
if(db.check_hash(checksum) == True):
|
write_counter = 0
|
||||||
print '%s already exists at %s. Skipping...' % (current_file, db.get_hash(checksum))
|
for current_file in filesystem.get_all_files(source, media_type.get_valid_extensions()):
|
||||||
continue
|
process_file(current_file)
|
||||||
|
# Write to the hash database every 10 iterations
|
||||||
|
write_counter += 1
|
||||||
|
if(write_counter % 10 == 0):
|
||||||
|
db.update_hash_db()
|
||||||
|
|
||||||
media = media_type(current_file)
|
# If there's anything we haven't written to the hash database then write it now
|
||||||
|
if(write_counter % 10 != 10):
|
||||||
if(media_type.__name__ == 'Video'):
|
|
||||||
filesystem.set_date_from_path_video(media)
|
|
||||||
|
|
||||||
metadata = media.get_metadata()
|
|
||||||
|
|
||||||
directory_name = filesystem.get_folder_path(date=metadata['date_taken'], latitude=metadata['latitude'], longitude=metadata['longitude'])
|
|
||||||
#directory_name = filesystem.get_folder_path(date=metadata['date_taken'])
|
|
||||||
dest_directory = '%s/%s' % (destination, directory_name)
|
|
||||||
# TODO remove the day prefix of the file that was there prior to the crawl
|
|
||||||
file_name = filesystem.get_file_name(media)
|
|
||||||
dest_path = '%s/%s' % (dest_directory, file_name)
|
|
||||||
|
|
||||||
filesystem.create_directory(dest_directory)
|
|
||||||
|
|
||||||
print '%s -> %s' % (current_file, dest_path)
|
|
||||||
shutil.copy2(current_file, dest_path)
|
|
||||||
#shutil.move(current_file, dest_path)
|
|
||||||
db.add_hash(checksum, dest_path)
|
|
||||||
|
|
||||||
# Write to the hash database every 10 iterations
|
|
||||||
write_counter += 1
|
|
||||||
if(write_counter % 10 == 0):
|
|
||||||
db.update_hash_db()
|
db.update_hash_db()
|
||||||
|
elif('file' in args):
|
||||||
|
process_file(args['file'])
|
||||||
|
|
||||||
# If there's anything we haven't written to the hash database then write it now
|
|
||||||
if(write_counter % 10 != 10):
|
|
||||||
db.update_hash_db()
|
db.update_hash_db()
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    # main() currently returns None, which sys.exit() maps to status 0,
    # so behavior is unchanged while allowing future non-zero returns;
    # the sys.exit(2) paths inside main() still take effect first.
    sys.exit(main(sys.argv[1:]))
|
Loading…
Reference in New Issue