Compare commits

...

104 Commits

Author SHA1 Message Date
Cédric Leporcq d54ccd64f6 Update LICENCE file and .gitignore 2022-08-28 17:11:03 +02:00
Cédric Leporcq 836792429f Change command line options for log levels 2022-08-28 14:22:30 +02:00
Cédric Leporcq b7435c4eac Modify input text fonction and add Input Class 2022-08-28 14:19:58 +02:00
Cédric Leporcq 723f549f73 Modify db checking for best performances 2022-08-28 14:19:58 +02:00
Cédric Leporcq 47b9aa57ae Add checksum dict 2022-08-28 14:19:58 +02:00
Cédric Leporcq 9e32052ce3 Verify checksum in check_db 2022-08-28 14:19:58 +02:00
Cédric Leporcq ed58383ea0 Add --ckecksum option to update command 2022-08-28 14:19:58 +02:00
Cédric Leporcq f6816c6c01 Allow import or sort single files 2022-08-28 07:50:43 +02:00
Cédric Leporcq b7f0cafe98 Update .gitignore 2022-08-28 07:50:43 +02:00
Cédric Leporcq 573a63998e Revamp and fix options 2022-08-28 07:50:43 +02:00
Cédric Leporcq 01b47c8c40 Fix date detection in filenames 2022-08-28 07:50:43 +02:00
Cédric Leporcq 52768f64db Add 60s tolerance when compare date_original, date_filename and
date_created
2022-08-28 07:50:43 +02:00
Cédric Leporcq cdfa408206 Fix get_date_from_string function 2022-08-28 07:50:43 +02:00
Cédric Leporcq eee3c71f6a Remove unused import 2022-08-28 07:50:43 +02:00
Cédric Leporcq 1eb2a2c6e0 Fix set original name to EXIF metadata 2022-08-28 07:50:43 +02:00
Cédric Leporcq 58e282fd87 Fix database 2022-08-28 07:50:43 +02:00
Cédric Leporcq a1ba0663b6 Fix edit metadata 2022-08-28 07:50:43 +02:00
Cédric Leporcq 503b90b631 Add fill date original option 2022-04-23 20:06:36 +02:00
Cédric Leporcq fd35963994 Test edit metadata 2022-04-23 20:06:36 +02:00
Cédric Leporcq be4cf38377 Add Id column as primary key 2022-04-23 13:23:00 +02:00
Cédric Leporcq 9afb433eed Fix retrieve geolocation from exif 2022-04-23 13:23:00 +02:00
Cédric Leporcq 114187415f Retrieve date from filename using get_date_fron_string function 2022-04-23 13:23:00 +02:00
Cédric Leporcq b883d9ca36 Fix exclude option 2022-04-23 13:23:00 +02:00
Cédric Leporcq acf1619b7a Fix Remove duplicate option for sort files 2022-04-23 13:23:00 +02:00
Cédric Leporcq 002404d82c Change path format syntax 2022-04-23 13:23:00 +02:00
Cédric Leporcq a4746cbf60 Fix edit metadata 2022-04-23 13:22:47 +02:00
Cédric Leporcq 5f0237a48f Fix database.py 2022-04-21 14:15:43 +02:00
Cédric Leporcq 27587468ba Fix part 2022-04-21 14:15:43 +02:00
Cédric Leporcq e60dab7f3b Fix load path format from config 2022-04-21 14:15:43 +02:00
Cédric Leporcq cf8b6a9dec Fix absolute path in cli.py 2022-04-21 14:15:43 +02:00
Cédric Leporcq 14234f12cb fixup! Refactoring collection options (2) 2022-04-21 14:15:43 +02:00
Cédric Leporcq ea37849eae Fix cache option for exiftool 2022-04-21 14:15:43 +02:00
Cédric Leporcq 957e436309 Add get_exif_metadata_caching method in ReadExif 2022-04-21 14:15:43 +02:00
Cédric Leporcq b0969f62af Add edit_metadata method in collection 2022-04-21 14:15:43 +02:00
Cédric Leporcq e0fc31e543 Move get_date_format method in media and fixes 2022-01-29 14:25:12 +01:00
Cédric Leporcq f0a7624b0f Add Clone command and fixes 2021-12-05 18:27:04 +01:00
Cédric Leporcq d55fc63a41 Fix system resume event in inquirer prompt 2021-12-05 14:11:28 +01:00
Cédric Leporcq da4a388697 Code improvements 2021-12-05 13:40:10 +01:00
Cédric Leporcq 7688d0e7c4 Refactoring collection options (3) 2021-11-19 18:24:35 +01:00
Cédric Leporcq 27af9bb55e Refactoring collection options (2) 2021-11-14 05:12:41 +01:00
Cédric Leporcq 0fdf09ea42 Refactoring collection options (1) 2021-11-13 10:03:53 +01:00
Cédric Leporcq 21be384563 fixup! User LOG global variable for logging 2021-11-13 10:01:21 +01:00
Cédric Leporcq e22ac84d75 Fix pylint formating 2021-11-12 12:58:34 +01:00
Cédric Leporcq 5d7e0c4f46 Fix image.py 2021-11-12 12:58:34 +01:00
Cédric Leporcq 11291a582e Remove confusing unused argument from get_date_regex 2021-11-12 12:58:34 +01:00
Cédric Leporcq a9913e61d9 User LOG global variable for logging 2021-11-12 12:58:34 +01:00
Cédric Leporcq 506869ca4f Fix ordigi utils 2021-11-12 12:58:34 +01:00
Cédric Leporcq 26845cf56b Refactoring media class (3) 2021-11-12 12:58:34 +01:00
Cédric Leporcq 8fd65fda34 Refactoring media class (2), create ReadExif and WriteExif class 2021-11-11 11:54:44 +01:00
Cédric Leporcq a693e6018a Refactoring Media class 2021-11-08 07:12:53 +01:00
Cédric Leporcq ad14604648 Fix some pylint warnings 2021-11-07 12:28:30 +01:00
Cédric Leporcq f42de5ad55 Store media in self.medias.datas list and fix pylint warnings 2021-11-07 11:18:13 +01:00
Cédric Leporcq f39e6feee5 Split media.get_metadata function 2021-11-07 09:44:43 +01:00
Cédric Leporcq eae26d9ee3 Save relpath in metadata earlier 2021-11-07 07:13:56 +01:00
Cédric Leporcq 88b3b0a0f1 Fix get path method 2021-11-06 18:04:46 +01:00
Cédric Leporcq 63c8c66660 Change default value to None for optional arguments 2021-11-06 17:07:38 +01:00
Cédric Leporcq adc738cab3 Fix pylint errors 2021-11-06 16:36:56 +01:00
Cédric Leporcq 34c9490580 Refactoring Collection class and fixes 2021-11-06 16:35:35 +01:00
Cédric Leporcq dde40149c2 Refactoring collection 2021-11-03 21:29:06 +01:00
Cédric Leporcq e04ad3248a Refactoring summary 2021-11-01 12:31:29 +01:00
Cédric Leporcq 648930f139 Use pathlib in cli and fix constants upercase style 2021-10-31 18:01:19 +01:00
Cédric Leporcq 202366a8f9 fixup! Add standard python build files 2021-10-31 15:17:20 +01:00
Cédric Leporcq 0b81f89eee Refactoring cli and collection modules and fix tests 2021-10-31 15:09:40 +01:00
Cédric Leporcq bfb2153eb8 fixup! Add standard python build files 2021-10-30 12:48:39 +02:00
Cédric Leporcq 9ec95d99f1 fixup! Add standard python build files 2021-10-30 08:45:11 +02:00
Cédric Leporcq 2a9230c49c Fix missing sample test files 2021-10-27 00:10:11 +02:00
Cédric Leporcq cac3a70b91 Add standard python build files 2021-10-27 00:08:15 +02:00
Cédric Leporcq 7c936fc32c Refactoring sort_file 2021-10-27 00:06:38 +02:00
Cédric Leporcq 4184d753ac remove old stuff 2021-10-24 19:28:51 +02:00
Cédric Leporcq 38d7cb6841 Refactoring and fix class 2021-10-23 07:51:53 +02:00
Cédric Leporcq 374f64d8b1 Change logger format for debug level and fixes 2021-10-18 20:48:23 +02:00
Cédric Leporcq 4324470be0 Fix empty filename string 2021-10-18 20:06:49 +02:00
Cédric Leporcq 16b3c646c2 Change summary format and fixes 2021-10-18 07:45:54 +02:00
Cédric Leporcq c24014b98f Allow ordigi.conf file per collection 2021-10-18 07:44:48 +02:00
Cédric Leporcq 513adb2890 Add path format and fix empty dir and exclude files 2021-10-17 20:04:20 +02:00
Cédric Leporcq 5c255093e3 Add FPath class 2021-10-17 12:33:14 +02:00
Cédric Leporcq 4442c18570 fixup! Add path selection in interactive mode using inquierer 2021-10-17 10:16:17 +02:00
Cédric Leporcq 0ae9f3118a Add geolocation tests 2021-10-17 10:14:19 +02:00
Cédric Leporcq 8f27a84571 Fix logger formating issues 2021-10-17 10:12:45 +02:00
Cédric Leporcq a93e7accc0 format python code with black --skip-string-normalization 2021-10-15 22:10:04 +02:00
Cédric Leporcq 1cade46307 Add init, check and update commands 2021-10-15 06:41:22 +02:00
Cédric Leporcq 2ac8ca3b67 Delete file path entry in db when file is moved inside collection 2021-10-09 17:37:57 +02:00
Cédric Leporcq 1a78962012 Use Path class for sort_similar_images function and test it 2021-10-09 16:19:33 +02:00
Cédric Leporcq 4156e769d0 Add final basic checkup 2021-09-29 07:36:47 +02:00
Cédric Leporcq 8e8afe9a89 Ability to retrieve metadata from Sqlite database and fixes 2021-09-28 21:40:12 +02:00
Cédric Leporcq 86d88b72c8 Add options and interactive mode for handling media date 2021-09-24 18:50:38 +02:00
Cédric Leporcq 6203498f20 Add path selection in interactive mode using inquierer 2021-09-19 12:52:26 +02:00
Cédric Leporcq 63b154b8f3 Add Glob option for filtering path 2021-09-19 12:51:53 +02:00
Cédric Leporcq 6af9d5d879 Change sample dir structure 2021-09-18 07:06:58 +02:00
Cédric Leporcq e194c9b6af Cleanup 2021-09-12 07:43:26 +02:00
Cédric Leporcq 1e673dde44 Add funcions to insert Sqlite tables dynamically 2021-09-12 07:41:44 +02:00
Cédric Leporcq db74342f21 Add --album-from-folder option to sort 2021-09-12 07:39:37 +02:00
Cédric Leporcq cc958cf53b Change json Db to Sqlite 2021-08-31 16:18:41 +02:00
Cédric Leporcq 9b055c88bd Refactoring filesystem.py 2021-08-28 19:21:36 +02:00
Cédric Leporcq 0fea0fcfd4 Fix get_date_format 2021-08-24 19:53:17 +02:00
Cédric Leporcq 016329e044 Refactoring config and geolocation methods 2021-08-24 19:53:17 +02:00
Cédric Leporcq 67f3cd471a Fix drop python 3.7 2021-08-24 19:53:17 +02:00
Cédric Leporcq 865f2b929b Fix logger 2021-08-24 19:53:17 +02:00
Cédric Leporcq 5d7bab0ec9 Fix sort_file and add solve_conflict function 2021-08-24 19:53:17 +02:00
Cédric Leporcq bceda812f3 Fix filter-by-ext 2021-08-24 19:53:17 +02:00
Cédric Leporcq 1d488c0154 Refactoring media class 2021-08-24 19:53:17 +02:00
Cédric Leporcq a0920e3b5b fixup! Move elodie code base to dozo and various fix 2021-08-24 19:53:12 +02:00
Cédric Leporcq 851b5b0d76 fixup! Change file paths definitions and parsing 2021-08-24 19:48:36 +02:00
Cédric Leporcq f88255e3c5 Rename dozo to ordigi 2021-08-24 08:14:37 +02:00
104 changed files with 5654 additions and 11732 deletions

152
.gitignore vendored
View File

@ -1,13 +1,141 @@
**/.DS_Store
**/*.pyc
**/config.ini
**/node_modules/**
dist/**
docs/_build
build/**
**/*.arw
**/*.dng
**/*.nef
**/*.rw2
env/**
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Compiled python modules.
*.pyc
# Other
/diagnostics.lua
docs/Ordigi_data_scheme.odg
/ressources
/Session.vim
/tags
/workflow.sh

View File

@ -1,37 +0,0 @@
FROM debian:jessie
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update -y && \
apt-get install -y --no-install-recommends ca-certificates libimage-exiftool-perl python2.7 python-pip python-pyexiv2 wget make && \
pip install --upgrade pip setuptools && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/*
RUN apt-get update -qq && \
apt-get install -y locales -qq && \
locale-gen en_US.UTF-8 en_us && \
dpkg-reconfigure locales && \
locale-gen C.UTF-8 && \
/usr/sbin/update-locale LANG=C.UTF-8
ENV LANG C.UTF-8
ENV LANGUAGE C.UTF-8
ENV LC_ALL C.UTF-8
RUN wget http://www.sno.phy.queensu.ca/~phil/exiftool/Image-ExifTool-10.20.tar.gz && \
gzip -dc Image-ExifTool-10.20.tar.gz | tar -xf - && \
cd Image-ExifTool-10.20 && perl Makefile.PL && \
make install && cd ../ && rm -r Image-ExifTool-10.20
COPY requirements.txt /opt/elodie/requirements.txt
COPY docs/requirements.txt /opt/elodie/docs/requirements.txt
COPY elodie/tests/requirements.txt /opt/elodie/elodie/tests/requirements.txt
WORKDIR /opt/elodie
RUN pip install -r docs/requirements.txt && \
pip install -r elodie/tests/requirements.txt && \
rm -rf /root/.cache/pip
COPY . /opt/elodie
CMD ["/bin/bash"]

713
LICENSE

File diff suppressed because it is too large Load Diff

167
README.md Normal file
View File

@ -0,0 +1,167 @@
# Ordigi
## Description
This tool aims to organize media files according to a given pattern. It is based on
Exif metadata and uses a SQLite database.
Goals:
- Organize your existing collection of photos or others media types into a customizable folder structure.
- Record metadata and other file data to a SQLite database
- Ability to edit metadata
## Install
Ordigi relies on the great [ExifTool library by Phil Harvey](http://www.sno.phy.queensu.ca/~phil/exiftool/). Make sure it is installed.
Clone this repository and install ordigi:
```
pip install .
```
## Usage Instructions
### Client interface
You can invoke several commands from the client interface.
Use `ordigi --help` and `ordigi [command] --help` for usage
instructions. For each command there are several options that can be invoked.
#### Import photos to collection
The default behavior is to move files from one or several source directories
to your destination directory. However, if you want to copy, use the `-c` or
`--copy` flag.
```
ordigi import -c /source1 /source2 /collection
```
#### Sort photos into collection
The `sort` command is essentially the same as import but restricted to files already in the
collection.
```
ordigi sort /subdir1 /subdir2 /collection
```
#### Compare images into collection
Sort file by similarity:
```
ordigi compare /subdir1 /subdir2 /collection
```
Undo sort files:
```
ordigi compare --revert-compare /subdir1 /subdir2 /collection
```
#### Verify collection against bit rot / data rot
```
ordigi check
```
### Edit metadata and Reorganize by changing location and dates (WIP)
```
ordigi edit --location="Las Vegas, NV" --sort
ordigi edit --time="2015-04-15" --sort
```
### Configuration
#### Config file
The sample configuration file `ordigi.conf` can be copied to `~/.config/ordigi/ordigi.conf` (default location).
Numerous options like the folder structure, exclusions and others can
be configured in this file.
#### Folder structure and name
The folder structure and name can be customized via placeholders, a *f-String like* bracket
keywords. Each keyword can be freely combined in any part of the path
pattern.
Default folder structure:
```
dirs_path=<%Y>/<%m-%b>-<city>-<folder>
name=<%Y%m%d-%H%M%S>-<%u<original_name>|%u<basename>>.%l<ext>
```
Example folder structure:
```
├── 2015
│ ├── 06-Jun-California
│ │ ├── 20150629_163414-img_3900.jpg
│ │ └── 20150629_170706-img_3901.jpg
│ └── Paris
│ └── 20150630_024043-img_3903.jpg
├── 2015
│ ├── 07-Jul-Mountain View
│ │ ├── 20150719_171637-img_9426.jpg
│ │ └── 20150724_190633-img_9432.jpg
└── 2015
│ ├── 09-Sep
│ ├── 20150927_014138-_dsc8705.dng
│ └── 20150927_014138-_dsc8705.nef
```
The folder structure use standard unix path separator (`/`). Fallback folder part can be optionally specified using a pipe separator and brackets (`<.*|.*>`).
Valid keywords are:
- Date string like *%Y%m%d* pattern. For details of the supported formats see [strftime.org](https://strftime.org/).
- Geolocation info from OpenStreetMap: *country, city, location, state*
- Folder structure of source subdirectories like *folder* or *folders[1:]* pattern,
similar to python list syntax.
- File data : *basename, ext, name, original_name*
- Exif metadata info: *album, camera_make, camera_model, title*.
- custom string using *custom* keyword.
- Special modifiers *%u*/*%l* for upper/lower case respectively.
The default file path structure would look like `2015/07-Jul-Mountain_View/20150712-142231-original_name.jpg`.
## Retrieving data from media
### EXIF and XMP tags
Ordigi uses embedded Exif metadata to organize media files and stores them in a SQLite database.
| Data type | Tags | Notes |
|---|---|---|
| Date Original | EXIF:DateTimeOriginal, H264:DateTimeOriginal, EXIF:ModifyDate, file created, file modified | |
| Date Created | EXIF:CreateDate, QuickTime:CreationDate, QuickTime:CreateDate, QuickTime:CreationDate-und-US, QuickTime:MediaCreateDate | |
| Date Modified | 'File:FileModifyDate', 'QuickTime:ModifyDate' | |
| Location | EXIF:GPSLatitude/EXIF:GPSLatitudeRef, EXIF:GPSLongitude/EXIF:GPSLongitudeRef, XMP:GPSLatitude, Composite:GPSLatitude, XMP:GPSLongitude, Composite:GPSLongitude | Composite tags are read-only |
| Title | XMP:Title, XMP:DisplayName | |
| Album | XMP-xmpDM:Album, XMP:Album | XMP:Album is user defined in `configs/ExifTool_config` for backwards compatibility |
| Camera Make | EXIF:Make, QuickTime:Make, EXIF:Model, QuickTime:Model | |
For example, the media date can be retrieved, in order of preference, from
*Date Original*, *Date Created*. Optionally *Date Modified* and even a filename *date string* can be used, depending on the options used (see `ordigi sort --help`).
### Geolocation info
Ordigi uses the *Nominatim* geocoder to retrieve geolocation info from OpenStreetMap, based on the *location* Exif metadata.
## Credits
The code is based on the [Elodie](https://github.com/jmathai/elodie) media organizer and takes inspiration from [SortPhotos](https://github.com/andrewning/sortphotos/blob/master/src/sortphotos.py) and [OSXPhotos](https://github.com/RhetTbull/osxphotos) for the Exiftool module.

402
Readme.md
View File

@ -1,402 +0,0 @@
# Hello, I'm Elodie
~~ *Your Personal EXIF-based Photo, Video and Audio Assistant* ~~
[![Build Status](https://travis-ci.org/jmathai/elodie.svg?branch=master)](https://travis-ci.org/jmathai/elodie) [![Coverage Status](https://coveralls.io/repos/github/jmathai/elodie/badge.svg?branch=master)](https://coveralls.io/github/jmathai/elodie?branch=master) [![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/jmathai/elodie/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/jmathai/elodie/?branch=master)
I made an easy to use app from this open source project. [Check out Elodie, the EXIF-based photo organizer app](https://getelodie.com).
<p align="center"><img src ="https://jmathai.s3.amazonaws.com/github/elodie/elodie-folder-anim.gif" /></p>
## Quickstart guide
Getting started takes just a few minutes.
### Install ExifTool
Elodie relies on the great [ExifTool library by Phil Harvey](http://www.sno.phy.queensu.ca/~phil/exiftool/). You'll need to install it for your platform.
Some features for video files will only work with newer versions of ExifTool and have been tested on version 10.20 or higher. Support for HEIC files requires version 11.50 or higher. Check your version by typing `exiftool -ver` and see the [manual installation instructions for ExifTool](http://www.sno.phy.queensu.ca/~phil/exiftool/install.html#Unix) if needed.
```
# OSX (uses homebrew, http://brew.sh/)
brew install exiftool
# Debian / Ubuntu
apt-get install libimage-exiftool-perl
# Fedora / Redhat
dnf install perl-Image-ExifTool
# Windows users can install the binary
# http://www.sno.phy.queensu.ca/~phil/exiftool/install.html
```
### Clone the Elodie repository
You can clone Elodie from GitHub. You'll need `git` installed ([instructions](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)).
```
git clone https://github.com/jmathai/elodie.git
cd elodie
pip install -r requirements.txt
```
### Give Elodie a test drive
Now that you've got the minimum dependencies installed you can start using Elodie. You'll need a photo, video or audio file and a folder you'd like Elodie to organize them into.
```
# Run these commands from the root of the repository you just cloned.
./elodie.py import --debug --destination="/where/i/want/my/photos/to/go" /where/my/photo/is.jpg
```
You'll notice that the photo was organized into an *Unknown Location* folder. That's because you haven't set up your MapQuest API ([instructions](#using-openstreetmap-data-from-mapquest)).
Now you're ready to learn more about Elodie.
<p align="center"><img src ="creative/logo@300x.png" /></p>
## Slowstart guide
[Read a 3 part blog post on why I was created](https://medium.com/vantage/understanding-my-need-for-an-automated-photo-workflow-a2ff95b46f8f#.dmwyjlc57) and how [I can be used with Google Photos](https://medium.com/@jmathai/my-automated-photo-workflow-using-google-photos-and-elodie-afb753b8c724).
I work tirelessly to make sure your photos are always sorted and organized so you can focus on more important things. By photos I mean JPEG, DNG, NEF and common video and audio files.
You don't love me yet but you will.
I only do 3 things.
* Firstly I organize your existing collection of photos into a customizable folder structure.
* Second I help make it easy for all the photos you haven't taken yet to flow into the exact location they belong.
* Third but not least I promise to do all this without a yucky proprietary database that some friends of mine use.
*NOTE: make sure you've installed everything I need before running the commands below. [Instructions](#quickstart-guide) at the top of this page.*
## Let's organize your existing photos
My guess is you've got quite a few photos scattered around. The first thing I'll help you do is to get those photos organized. It doesn't matter if you have hundreds, thousands or tens of thousands of photos; the more the merrier.
Fire up your terminal and run this command which *__copies__* your photos into something a bit more structured.
```
./elodie.py import --destination="/where/i/want/my/photos/to/go" /where/my/photos/are
```
I'm pretty fast but depending on how many photos you have you might want to grab a snack. When you run this command I'll `print` out my work as I go along. If you're bored you can open `/where/i/want/my/photos/to/go` in *Finder* and watch as I effortlessly copy your photos there.
You'll notice that your photos are now organized by date and location. Some photos do not have proper dates or location information in them. I do my best and in the worst case scenario I'll use the earlier of the files access or modified time. Ideally your photos have dates and location in the EXIF so my work is more accurate.
Don't fret if your photos don't have much EXIF information. I'll show you how you can fix them up later on but let's walk before we run.
Back to your photos. When I'm done you should see something like this. Notice that I've renamed your files by adding the date and time they were taken. This helps keep them in chronological order when using most viewing applications. You'll thank me later.
```
├── 2015-06-Jun
│ ├── California
│ │ ├── 2015-06-29_16-34-14-img_3900.jpg
│ │ └── 2015-06-29_17-07-06-img_3901.jpg
│ └── Paris
│ └── 2015-06-30_02-40-43-img_3903.jpg
├── 2015-07-Jul
│ ├── Mountain View
│ │ ├── 2015-07-19_17-16-37-img_9426.jpg
│ │ └── 2015-07-24_19-06-33-img_9432.jpg
└── 2015-09-Sep
│ ├── Unknown Location
│ ├── 2015-09-27_01-41-38-_dsc8705.dng
│ └── 2015-09-27_01-41-38-_dsc8705.nef
```
Not too bad, eh? Wait a second, what's *Unknown Location*? If I'm not able to figure out where a photo was taken I'll place it into a folder named *Unknown Location*. This typically happens when photos do not have GPS information in their EXIF. You shouldn't see this for photos taken on a smartphone but it's often the case with digital cameras and SLRs. I can help you add GPS information to those photos and get them organized better. Let me show you how.
### Usage Instructions
You can view these instructions on the command line by typing `./elodie.py import --help`, `./elodie.py update --help` or `./elodie.py generate-db --help`.
#### Import photos
```
Usage: elodie.py import [OPTIONS] [PATHS]...
Import files or directories by reading their EXIF and organizing them
accordingly.
Options:
--destination DIRECTORY Copy imported files into this directory.
[required]
--source DIRECTORY Import files from this directory, if specified.
--file PATH Import this file, if specified.
--album-from-folder Use images' folders as their album names.
--trash After copying files, move the old files to the
trash.
--allow-duplicates Import the file even if it's already been imported.
--debug Override the value in constants.py with True.
--exclude-regex TEXT Regular expression for directories or files to
exclude.
--help Show this message and exit.
```
#### Update photos
```
Usage: elodie.py update [OPTIONS] FILES...
Update a file's EXIF. Automatically modifies the file's location and file
name accordingly.
Options:
--album TEXT Update the image album.
--location TEXT Update the image location. Location should be the name of a
place, like "Las Vegas, NV".
--time TEXT Update the image time. Time should be in YYYY-mm-dd
hh:ii:ss or YYYY-mm-dd format.
--title TEXT Update the image title.
--help Show this message and exit.
```
#### (Re)Generate checksum database
```
Usage: elodie.py generate-db [OPTIONS]
Regenerate the hash.json database which contains all of the sha256
signatures of media files. The hash.json file is located at ~/.elodie/.
Options:
--source DIRECTORY Source of your photo library. [required]
--help Show this message and exit.
```
#### Verify library against bit rot / data rot
```
Usage: elodie.py verify
```
### Excluding folders and files from being imported
If you have specific folders or files which you would like to prevent from being imported you can provide regular expressions which will be used to match and skip files from being imported.
You can specify an exclusion at run time by using the `--exclude-regex` argument of the `import` command. You can pass multiple `--exclude-regex` arguments and all folder/file paths which match will be (silently) skipped.
If there are certain file or folder paths you *never* want to import then you can also add an `[Exclusions]` section to your `config.ini` file. Similar to the command line argument you can provide multiple exclusions. Here is an example.
```
[Exclusions]
synology_folders=@eaDir
thumbnails=.thumbnails
```
### Create your own folder structure
OK, so what if you don't like the folders being named `2015-07-Jul/Mountain View`? No problem!
You can add a custom folder structure by editing your `config.ini` file (which should be placed under `~/.elodie/config.ini`). If you'd like to use a different folder for your configuration file then set an environment variable named `ELODIE_APPLICATION_DIRECTORY` with the fully qualified directory path.
#### Custom folder examples
Sometimes examples are easier to understand than explanations so I'll start there. If you'd like to understand my magic I explain it in more detail below these examples. You customize your folder structure in the `Directory` section of your `config.ini`. For details of the supported formats see [strftime.org](http://strftime.org/)
```
[Directory]
location=%city, %state
year=%Y
full_path=%year/%location
# -> 2015/Sunnyvale, California
location=%city, %state
month=%B
year=%Y
full_path=%year/%month/%location
# -> 2015/December/Sunnyvale, California
location=%city, %state
month=%m
year=%Y
full_path=%year-%month/%location
# -> 2015-12/Sunnyvale, California
date=%Y
location=%city, %state
custom=%date %album
full_path=%location/%custom
# -> Sunnyvale, California/2015 Birthday Party
```
#### Using fallback folders
There are times when the EXIF needed to correctly name a folder doesn't exist on a photo. I came up with fallback folders to help you deal with situations such as this. Here's how it works.
You can specify a series of folder names by separating them with a `|`. That's a pipe, not an L. Let's look at an example.
```
[Directory]
month=%m
year=%Y
location=%city
full_path=%month/%year/%album|%location|%"Beats me"
```
What this asks me to do is to name the last folder the same as the album I find in EXIF. If I don't find an album in EXIF then I should use the location. If there's no GPS in the EXIF then I should name the last folder `Beats me`.
#### How folder customization works
You can construct your folder structure using a combination of the location, dates and camera make/model. Under the `Directory` section of your `config.ini` file you can define placeholder names and assign each a value. For example, `date=%Y-%m` would create a date placeholder with a value of YYYY-MM which would be filled in with the date from the EXIF on the photo.
The placeholders can be used to define the folder structure you'd like to create. The default structure would look like `2015-07-Jul/Mountain View`.
I have some date placeholders you can customize. You can use any of [the standard Python time directives](https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior) to customize the date format to your liking.
* `%day` the day the photo was taken.
* `%month` the month the photo was taken.
* `%year` the year the photo was taken.
I have camera make and model placeholders which can be used to include the camera make and model into the folder path.
* `%camera_make` the make of the camera which took the photo.
* `%camera_model` the model of the camera which took the photo.
I also have a few built-in location placeholders you can use. Use this to construct the `%location` you use in `full_path`.
* `%city` the name of the city the photo was taken. Requires geolocation data in EXIF.
* `%state` the name of the state the photo was taken. Requires geolocation data in EXIF.
* `%country` the name of the country the photo was taken. Requires geolocation data in EXIF.
In addition to my built-in and date placeholders you can combine them into a single folder name using my complex placeholders.
* `%location` can be used to combine multiple values of `%city`, `%state` and `%country`. For example, `location=%city, %state` would result in folder names like `Sunnyvale, California`.
* `%date` can be used to combine multiple values from [the standard Python time directives](https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior). For example, `date=%Y-%m` would result in folder names like `2015-12`.
* `%custom` can be used to combine multiple values from anything else. Think of it as a catch-all when `%location` and `%date` don't meet your needs.
#### How file customization works
You can configure how Elodie names your files using placeholders. This works similarly to how folder customization works. The default naming format is what's referred to elsewhere in this document and has many thought through benefits. Using the default will gives you files named like `2015-09-27_01-41-38-_dsc8705.jpg`.
* Minimizes the likelihood of naming conflicts.
* Encodes important EXIF information into the file name.
* Optimizes for sort order when listing in most file and photo viewers.
If you'd like to specify your own naming convention it's recommended you include something that's mostly unique like the time including seconds. You'll need to include a `[File]` section in your `config.ini` file with a name attribute. If a placeholder doesn't have a value then it plus any preceding characters which are not alphabetic are removed.
By default the resulting filename is all lowercased. To change this behavior to uppercasing add capitalization=upper.
```
[File]
date=%Y-%m-%b-%H-%M-%S
name=%date-%original_name-%title.%extension
# -> 2012-05-mar-12-59-30-dsc_1234-my-title.jpg
date=%Y-%m-%b-%H-%M-%S
name=%date-%original_name-%album.%extension
capitalization=upper
# -> 2012-05-MAR-12-59-30-DSC_1234-MY-ALBUM.JPG
```
### Reorganize by changing location and dates
If you notice some photos were incorrectly organized you should definitely let me know. In the example above I put two photos into an *Unknown Location* folder because I didn't find GPS information in their EXIF. To fix this I'll help you add GPS information into the photos' EXIF and then I'll reorganize them.
#### Tell me where your photos were taken
Run the command below if you want to tell me the photos were taken in Las Vegas. You don't have to type all that in though. It's easier to just type `./elodie.py update --location="Las Vegas, NV" ` and select and drag the files from *OS X Finder* into the terminal.
```
./elodie.py update --location="Las Vegas, NV" /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.dng /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.nef
```
You should see this after running that command.
```
└── 2015-09-Sep
│ ├── Las Vegas
│ ├── 2015-09-27_01-41-38-_dsc8705.dng
│ └── 2015-09-27_01-41-38-_dsc8705.nef
```
#### Tell me when you took your photos
Run the command below if I got the date wrong when organizing your photos. Similarly to the above command you can drag files from *Finder* into your terminal.
```
./elodie.py update --time="2015-04-15" /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.dng /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.nef
```
That will change the date folder like so.
```
└── 2015-04-Apr
│ ├── Las Vegas
│ ├── 2015-09-27_01-41-38-_dsc8705.dng
│ └── 2015-09-27_01-41-38-_dsc8705.nef
```
You can, of course, ask me to change the location and time. I'll happily update the photos and move them around accordingly.
```
./elodie.py update --location="Las Vegas, NV" --time="2015-04-15" /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.dng /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.nef
```
## What about photos I take in the future?
Organizing your existing photos is great. But I'd be lying if I said I was the only one who could help you with that. Unlike other programs I put the same effort into keeping your library organized into the future as I have in getting it organized in the first place.
### Letting me know when you've got more photos to organize
In order to sort new photos that I haven't already organized I need someone to tell me about them. There's no single way to do this. You could use inotify, cron, Automator or my favorite app - Hazel; it doesn't matter.
If you'd like to let me know of a specific photo or group of photos to add to your library you would run one of the following commands. Use fully qualified paths for everything since you won't be running this manually.
```
# I can import a single file into your library.
./elodie.py import --destination="/where/i/want/my/photo/to/go" /full/path/to/file.jpg
# I can also import all the photos from a directory into your library.
./elodie.py import --destination="/where/i/want/my/photo/to/go" /where/my/photos/are
```
## Why not use a database?
Look, it's not that I think databases are evil. One of my friends is a database. It's just that I've been doing this for a long time and I've always used a database for it. In the end they're more trouble than they're worth. I should have listened to my mother when she told me to not date a database.
It's a lot more work to organize photos without a database. No wonder everyone else uses them. But your happiness is my happiness. If a little elbow grease on my part makes you happy then I'm glad to do it.
### A bit on how I do all this without a database
Every photo is essentially a database. So it's more accurate to say I use the thousands of tiny databases you already have and use them to organize your photos.
I'm simple. I put a photo into its proper location. I can update a photo to have the right date or location. The latter triggers the first; creating a nice tidy loop of organizational goodness.
I don't do anything else so don't bother asking.
## EXIF and XMP tags
When I organize photos I look at the embedded metadata. Here are the details of how I determine what information to use in order of precedence.
| Dimension | Fields | Notes |
|---|---|---|
| Date Taken (photo) | EXIF:DateTimeOriginal, EXIF:CreateDate, EXIF:ModifyDate, file created, file modified | |
| Date Taken (video, audio) | QuickTime:CreationDate, QuickTime:CreateDate, QuickTime:CreationDate-und-US, QuickTime:MediaCreateDate, H264:DateTimeOriginal, file created, file modified | |
| Location (photo) | EXIF:GPSLatitude/EXIF:GPSLatitudeRef, EXIF:GPSLongitude/EXIF:GPSLongitudeRef | |
| Location (video, audio) | XMP:GPSLatitude, Composite:GPSLatitude, XMP:GPSLongitude, Composite:GPSLongitude | Composite tags are read-only |
| Title (photo) | XMP:Title | |
| Title (video, audio) | XMP:DisplayName | |
| Album | XMP-xmpDM:Album, XMP:Album | XMP:Album is user defined in `configs/ExifTool_config` for backwards compatibility |
| Camera Make (photo, video) | EXIF:Make, QuickTime:Make | |
| Camera Model (photo, video) | EXIF:Model, QuickTime:Model | |
## Using OpenStreetMap data from MapQuest
I use MapQuest to help me organize your photos by location. You'll need to sign up for a [free developer account](https://developer.mapquest.com/plan_purchase/steps/business_edition/business_edition_free) and get an API key. They give you 15,000 calls per month so I can't do any more than that unless you shell out some big bucks to them. Once I hit my limit the best I'll be able to do is *Unknown Location* until the following month.
Once you sign up you'll have to get an API key and copy it into a file named `~/.elodie/config.ini`. I've included a `config.ini-sample` file which you can copy to `config.ini`.
```
mkdir ~/.elodie
cp config.ini-sample ~/.elodie/config.ini
# now you're ready to add your MapQuest key
```
If you're an English speaker then you will probably want to add `prefer_english_names=True` to the `[MapQuest]` section; otherwise cities will be named using the local language.
## Questions, comments or concerns?
The best ways to provide feedback is by opening a [GitHub issue](https://github.com/jmathai/elodie/issues) or emailing me at [jaisen@jmathai.com](mailto:jaisen@jmathai.com).

View File

@ -1,54 +0,0 @@
# Hello, I'm Elodie's GUI
~~ *Your Personal EXIF-based Photo, Video and Audio Assistant* ~~
<p align="center"><img src ="../../../blob/master/creative/logo@300x.png" /></p>
You can download [my latest GUI from the releases page](https://github.com/jmathai/elodie/releases).
My GUI taskbar app sits nestled away in your taskbar until you need me.
Let's say you took a few hundred photos in New York City. I'll have put the photos into a folder named *New York City*. You decide you'd rather organize those photos into a folder named *Summer in NYC*. What you'd do is select the photos using Finder and drag them onto my taskbar icon. I'll display a few options and one of them would be to *Create album*. Type in an album name and I'll add this to the EXIF of your photos and move them to a folder with the same name.
*NOTE: I've extensively used the GUI but it's a work in progress.*
## See me in action
Updating EXIF of photos using the GUI taskbar app.
[![IMAGE ALT TEXT](http://img.youtube.com/vi/fF_jGCaMog0/0.jpg)](http://www.youtube.com/watch?v=fF_jGCaMog0 "Updating Photos Using GUI Taskbar App")
## Building the app
You'll need to bundle up the python dependencies and create an electron app using Node.js.
### Bundling the python libraries
First you'll need to [install the python dependencies](../../../#install-everything-you-need).
Once you've done that you'll need to install `pyinstaller`.
```
pip install pyinstaller
```
Next you can `cd` to the root of the repository and run `pyinstaller`.
```
pyinstaller elodie.spec
```
This should create a `dist` folder that bundles all of the dependencies. Now you're ready to build the GUI app.
### Building the GUI app
The GUI app is written using [Node.js](https://github.com/nodejs) and [Electron](https://github.com/atom/electron) and you'll need [electron-packager](https://github.com/maxogden/electron-packager) to create an executable file for your operating system.
I'm going to assume you've got *Node.js* installed. I've successfully built the app using version `5.1.0` on OS X.
```
# use --platform=win32 for Windows or --platform=linux for linux
electron-packager . Elodie --platform=darwin --arch=x64 --version=0.34.2 --overwrite
```
This will create a folder named `Elodie-darwin-x64` which contains the executable. Running the executable should add my face to your taskbar which you can click on or drag photos over.

View File

@ -1,6 +0,0 @@
<html>
<head></head>
<body>
<div style="width:100%; height:100%; background-color:#fff;"></div>
</body>
</html>

View File

@ -1,34 +0,0 @@
<html>
<head>
<script src="js/handlers.js"></script>
<link href='https://fonts.googleapis.com/css?family=Lato:400,100,300,100italic,300italic' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="css/bootstrap.css"></script>
<link rel="stylesheet" href="css/boilerplate.css"></script>
<link rel="stylesheet" href="css/styles.css"></script>
<link rel="stylesheet" href="css/fontello/css/animation.css"></script>
<link rel="stylesheet" href="css/fontello/css/elodie.css"></script>
</head>
<body>
<div class="titlebar">
<!--<a href="" class="left quit quitProgram"><i class="icon-cancel-circle"></i></a>
<a href="" class="left minus minimizeProgram"><i class="icon-minus-circle"></i></a>-->
How can I help you? <em>-- Elodie</em><i></i>
</div>
<form class="updateConfig" action="" method="post">
<div class="content">
<p>
Doesn't look like you have a MapQuest API key. Get one for free <a href="#" class="launchUrl" data-url="https://developer.mapquest.com/plan_purchase/steps/business_edition/business_edition_free">here</a>.
</p>
<div class="location">
<label for="mapquest-api-key-field"><i class="icon-map"></i>MapQuest API Key</label>
<input id="mapquest-api-key-field" type="text" placeholder="i.e. pzjNKTtTjLydLtxUBwdgKAIC8OQbGLUy">
<button type="submit" class="push">Get Started<i></i></button>
</div>
</div>
</form>
<script>
document.getElementById('location-field').focus();
</script>
</body>
</html>

View File

@ -1,293 +0,0 @@
/*
* HTML5 Boilerplate
*
* What follows is the result of much research on cross-browser styling.
* Credit left inline and big thanks to Nicolas Gallagher, Jonathan Neal,
* Kroc Camen, and the H5BP dev community and team.
*
* Detailed information about this CSS: h5bp.com/css
*
* ==|== normalize ==========================================================
*/
/* =============================================================================
HTML5 display definitions
========================================================================== */
article, aside, details, figcaption, figure, footer, header, hgroup, nav, section { display: block; }
audio, canvas, video { display: inline-block; *display: inline; *zoom: 1; }
audio:not([controls]) { display: none; }
[hidden] { display: none; }
/* =============================================================================
Base
========================================================================== */
/*
* 1. Correct text resizing oddly in IE6/7 when body font-size is set using em units
* 2. Force vertical scrollbar in non-IE
* 3. Prevent iOS text size adjust on device orientation change, without disabling user zoom: h5bp.com/g
*/
html { font-size: 100%; overflow-y: scroll; -webkit-text-size-adjust: 100%; -ms-text-size-adjust: 100%; }
body { margin: 0; font-size: 13px; line-height: 1.231; }
body, button, input, select, textarea { font-family: sans-serif; color: #222; }
/*
* Remove text-shadow in selection highlight: h5bp.com/i
* These selection declarations have to be separate
* Also: hot pink! (or customize the background color to match your design)
*/
::-moz-selection { background: #fe57a1; color: #fff; text-shadow: none; }
::selection { background: #fe57a1; color: #fff; text-shadow: none; }
/* =============================================================================
Links
========================================================================== */
a { color: #00e; }
a:visited { color: #551a8b; }
a:hover { color: #06e; }
a:focus { outline: thin dotted; }
/* Improve readability when focused and hovered in all browsers: h5bp.com/h */
a:hover, a:active { outline: 0; }
/* =============================================================================
Typography
========================================================================== */
abbr[title] { border-bottom: 1px dotted; }
b, strong { font-weight: bold; }
blockquote { margin: 1em 40px; }
dfn { font-style: italic; }
hr { display: block; height: 1px; border: 0; border-top: 1px solid #ccc; margin: 1em 0; padding: 0; }
ins { background: #ff9; color: #000; text-decoration: none; }
mark { background: #ff0; color: #000; font-style: italic; font-weight: bold; }
/* Redeclare monospace font family: h5bp.com/j */
pre, code, kbd, samp { font-family: monospace, serif; _font-family: 'courier new', monospace; font-size: 1em; }
/* Improve readability of pre-formatted text in all browsers */
pre { white-space: pre; white-space: pre-wrap; word-wrap: break-word; }
q { quotes: none; }
q:before, q:after { content: ""; content: none; }
small { font-size: 85%; }
/* Position subscript and superscript content without affecting line-height: h5bp.com/k */
sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; }
sup { top: -0.5em; }
sub { bottom: -0.25em; }
/* =============================================================================
Lists
========================================================================== */
ul, ol { margin: 1em 0; padding: 0 0 0 40px; }
dd { margin: 0 0 0 40px; }
nav ul, nav ol { list-style: none; list-style-image: none; margin: 0; padding: 0; }
/* =============================================================================
Embedded content
========================================================================== */
/*
* 1. Improve image quality when scaled in IE7: h5bp.com/d
* 2. Remove the gap between images and borders on image containers: h5bp.com/e
*/
img { border: 0; -ms-interpolation-mode: bicubic; vertical-align: middle; }
/*
* Correct overflow not hidden in IE9
*/
svg:not(:root) { overflow: hidden; }
/* =============================================================================
Figures
========================================================================== */
figure { margin: 0; }
/* =============================================================================
Forms
========================================================================== */
form { margin: 0; }
fieldset { border: 0; margin: 0; padding: 0; }
/* Indicate that 'label' will shift focus to the associated form element */
label { cursor: pointer; }
/*
* 1. Correct color not inheriting in IE6/7/8/9
* 2. Correct alignment displayed oddly in IE6/7
*/
legend { border: 0; *margin-left: -7px; padding: 0; }
/*
* 1. Correct font-size not inheriting in all browsers
* 2. Remove margins in FF3/4 S5 Chrome
* 3. Define consistent vertical alignment display in all browsers
*/
button, input, select, textarea { font-size: 100%; margin: 0; vertical-align: baseline; *vertical-align: middle; }
/*
* 1. Define line-height as normal to match FF3/4 (set using !important in the UA stylesheet)
* 2. Correct inner spacing displayed oddly in IE6/7
*/
button, input { line-height: normal; *overflow: visible; }
/*
* Reintroduce inner spacing in 'table' to avoid overlap and whitespace issues in IE6/7
*/
table button, table input { *overflow: auto; }
/*
* 1. Display hand cursor for clickable form elements
* 2. Allow styling of clickable form elements in iOS
*/
button, input[type="button"], input[type="reset"], input[type="submit"] { cursor: pointer; -webkit-appearance: button; }
/*
* Consistent box sizing and appearance
*/
input[type="checkbox"], input[type="radio"] { box-sizing: border-box; padding: 0; }
input[type="search"] { -webkit-appearance: textfield; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; box-sizing: content-box; }
input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; }
/*
* Remove inner padding and border in FF3/4: h5bp.com/l
*/
button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; }
/*
* 1. Remove default vertical scrollbar in IE6/7/8/9
* 2. Allow only vertical resizing
*/
textarea { overflow: auto; vertical-align: top; resize: vertical; }
/* Colors for form validity */
input:valid, textarea:valid { }
input:invalid, textarea:invalid { background-color: #f0dddd; }
/* =============================================================================
Tables
========================================================================== */
table { border-collapse: collapse; border-spacing: 0; }
td { vertical-align: top; }
/* ==|== primary styles =====================================================
Author:
========================================================================== */
/* ==|== media queries ======================================================
PLACEHOLDER Media Queries for Responsive Design.
These override the primary ('mobile first') styles
Modify as content requires.
========================================================================== */
@media only screen and (min-width: 480px) {
/* Style adjustments for viewports 480px and over go here */
}
@media only screen and (min-width: 768px) {
/* Style adjustments for viewports 768px and over go here */
}
/* ==|== non-semantic helper classes ========================================
Please define your styles before this section.
========================================================================== */
/* For image replacement */
.ir { display: block; border: 0; text-indent: -999em; overflow: hidden; background-color: transparent; background-repeat: no-repeat; text-align: left; direction: ltr; }
.ir br { display: none; }
/* Hide from both screenreaders and browsers: h5bp.com/u */
.hidden { display: none !important; visibility: hidden; }
/* Hide only visually, but have it available for screenreaders: h5bp.com/v */
.visuallyhidden { border: 0; clip: rect(0 0 0 0); height: 1px; margin: -1px; overflow: hidden; padding: 0; position: absolute; width: 1px; }
/* Extends the .visuallyhidden class to allow the element to be focusable when navigated to via the keyboard: h5bp.com/p */
.visuallyhidden.focusable:active, .visuallyhidden.focusable:focus { clip: auto; height: auto; margin: 0; overflow: visible; position: static; width: auto; }
/* Hide visually and from screenreaders, but maintain layout */
.invisible { visibility: hidden; }
/* Contain floats: h5bp.com/q */
.clearfix:before, .clearfix:after { content: ""; display: table; }
.clearfix:after { clear: both; }
.clearfix { *zoom: 1; }
/* ==|== print styles =======================================================
Print styles.
Inlined to avoid required HTTP connection: h5bp.com/r
========================================================================== */
@media print {
* { background: transparent !important; color: black !important; text-shadow: none !important; filter:none !important; -ms-filter: none !important; } /* Black prints faster: h5bp.com/s */
a, a:visited { text-decoration: underline; }
a[href]:after { content: " (" attr(href) ")"; }
abbr[title]:after { content: " (" attr(title) ")"; }
.ir a:after, a[href^="javascript:"]:after, a[href^="#"]:after { content: ""; } /* Don't show links for images, or javascript/internal links */
pre, blockquote { border: 1px solid #999; page-break-inside: avoid; }
thead { display: table-header-group; } /* h5bp.com/t */
tr, img { page-break-inside: avoid; }
img { max-width: 100% !important; }
@page { margin: 0.5cm; }
p, h2, h3 { orphans: 3; widows: 3; }
h2, h3 { page-break-after: avoid; }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,30 +0,0 @@
Font license info
## Fontelico
Copyright (C) 2012 by Fontello project
Author: Crowdsourced, for Fontello project
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://fontello.com
## Modern Pictograms
Copyright (c) 2012 by John Caserta. All rights reserved.
Author: John Caserta
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://thedesignoffice.org/project/modern-pictograms/
## Typicons
(c) Stephen Hutchings 2012
Author: Stephen Hutchings
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://typicons.com/

View File

@ -1,88 +0,0 @@
{
"name": "elodie",
"css_prefix_text": "icon-",
"css_use_suffix": false,
"hinting": true,
"units_per_em": 1000,
"ascent": 850,
"glyphs": [
{
"uid": "c64623255a4a7c72436b199b05296c4f",
"css": "happy",
"code": 59392,
"src": "fontelico"
},
{
"uid": "53ed8570225581269cd7eff5795e8bea",
"css": "unhappy",
"code": 59396,
"src": "fontelico"
},
{
"uid": "f0c301ac841dafc38d8eb1b933fc73e5",
"css": "spin",
"code": 59393,
"src": "fontelico"
},
{
"uid": "0f99ab40ab0b4d64a74f2d0deeb03e42",
"css": "video",
"code": 59397,
"src": "fontawesome"
},
{
"uid": "b091a8bd0fdade174951f17d936f51e4",
"css": "folder-closed",
"code": 59402,
"src": "fontawesome"
},
{
"uid": "6533bdc16ab201eb3f3b27ce989cab33",
"css": "folder-open",
"code": 59401,
"src": "fontawesome"
},
{
"uid": "c5845105a87df2ee1999826d90622f6a",
"css": "title",
"code": 59399,
"src": "fontawesome"
},
{
"uid": "87d337fee4866c2c28f6082994ce0f41",
"css": "map",
"code": 59395,
"src": "typicons"
},
{
"uid": "bd517dbd6ccbc464f6d80efca97abb7d",
"css": "media-add",
"code": 59400,
"src": "typicons"
},
{
"uid": "dplw5xo88mzzr7b45nvjcamyyhni6drs",
"css": "book",
"code": 59394,
"src": "modernpics"
},
{
"uid": "64abb7c56aefca89046bb69f7251d2e2",
"css": "calendar",
"code": 59398,
"src": "elusive"
},
{
"uid": "15739f3032c2aa3df67efc96b3ffef56",
"css": "cancel-circle",
"code": 59403,
"src": "websymbols"
},
{
"uid": "994eaa764b3f30721f3839c64c390ce3",
"css": "minus-circle",
"code": 59404,
"src": "websymbols"
}
]
}

View File

@ -1,85 +0,0 @@
/*
Animation example, for spinners
*/
.animate-spin {
-moz-animation: spin 2s infinite linear;
-o-animation: spin 2s infinite linear;
-webkit-animation: spin 2s infinite linear;
animation: spin 2s infinite linear;
display: inline-block;
}
@-moz-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@-webkit-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@-o-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@-ms-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}

View File

@ -1,40 +0,0 @@
{
"name": "elodie",
"css_prefix_text": "icon-",
"css_use_suffix": false,
"hinting": true,
"units_per_em": 1000,
"ascent": 850,
"glyphs": [
{
"uid": "c64623255a4a7c72436b199b05296c4f",
"css": "happy",
"code": 59392,
"src": "fontelico"
},
{
"uid": "53ed8570225581269cd7eff5795e8bea",
"css": "emo-unhappy",
"code": 59396,
"src": "fontelico"
},
{
"uid": "f0c301ac841dafc38d8eb1b933fc73e5",
"css": "spin",
"code": 59393,
"src": "fontelico"
},
{
"uid": "87d337fee4866c2c28f6082994ce0f41",
"css": "map",
"code": 59395,
"src": "typicons"
},
{
"uid": "dplw5xo88mzzr7b45nvjcamyyhni6drs",
"css": "book",
"code": 59394,
"src": "modernpics"
}
]
}

View File

@ -1,69 +0,0 @@
@font-face {
font-family: 'elodie';
src: url('../font/elodie.eot?99803888');
src: url('../font/elodie.eot?99803888#iefix') format('embedded-opentype'),
url('../font/elodie.woff?99803888') format('woff'),
url('../font/elodie.ttf?99803888') format('truetype'),
url('../font/elodie.svg?99803888#elodie') format('svg');
font-weight: normal;
font-style: normal;
}
/* Chrome hack: SVG is rendered more smooth in Windozze. 100% magic, uncomment if you need it. */
/* Note, that will break hinting! In other OS-es font will be not as sharp as it could be */
/*
@media screen and (-webkit-min-device-pixel-ratio:0) {
@font-face {
font-family: 'elodie';
src: url('../font/elodie.svg?99803888#elodie') format('svg');
}
}
*/
[class^="icon-"]:before, [class*=" icon-"]:before {
font-family: "elodie";
font-style: normal;
font-weight: normal;
speak: none;
display: inline-block;
text-decoration: inherit;
width: 1em;
margin-right: .2em;
text-align: center;
/* opacity: .8; */
/* For safety - reset parent styles, that can break glyph codes*/
font-variant: normal;
text-transform: none;
/* fix buttons height, for twitter bootstrap */
line-height: 1em;
/* Animation center compensation - margins should be symmetric */
/* remove if not needed */
margin-left: .2em;
/* you can be more comfortable with increased icons size */
/* font-size: 120%; */
/* Font smoothing. That was taken from TWBS */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
/* Uncomment for 3D effect */
/* text-shadow: 1px 1px 1px rgba(127, 127, 127, 0.3); */
}
.icon-happy:before { content: '\e800'; } /* '' */
.icon-spin:before { content: '\e801'; } /* '' */
.icon-book:before { content: '\e802'; } /* '' */
.icon-map:before { content: '\e803'; } /* '' */
.icon-unhappy:before { content: '\e804'; } /* '' */
.icon-video:before { content: '\e805'; } /* '' */
.icon-calendar:before { content: '\e806'; } /* '' */
.icon-title:before { content: '\e807'; } /* '' */
.icon-media-add:before { content: '\e808'; } /* '' */
.icon-folder-open:before { content: '\e809'; } /* '' */
.icon-folder-closed:before { content: '\e80a'; } /* '' */
.icon-cancel-circle:before { content: '\e80b'; } /* '' */
.icon-minus-circle:before { content: '\e80c'; } /* '' */

View File

@ -1,24 +0,0 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg">
<metadata>Copyright (C) 2015 by original authors @ fontello.com</metadata>
<defs>
<font id="elodie" horiz-adv-x="1000" >
<font-face font-family="elodie" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" />
<missing-glyph horiz-adv-x="1000" />
<glyph glyph-name="happy" unicode="&#xe800;" d="m261 800c-60 0-109-65-109-144 0-80 49-145 109-145s110 65 110 145c0 79-49 144-110 144z m477 0c-61 0-110-65-110-144 0-80 49-145 110-145 60 0 110 65 110 145 0 79-50 144-110 144z m208-599c-13 0-27-5-37-16-4-4-8-8-12-12-111-109-253-164-396-165-142-2-285 50-396 155l-3 3-12 12c-21 21-54 20-75-1-20-21-20-55 1-76 3-4 8-8 14-14l3-3c132-124 301-186 469-184 169 1 337 67 468 195 5 5 9 10 14 14 20 22 20 56-1 77-10 10-23 15-37 15z" horiz-adv-x="999" />
<glyph glyph-name="spin" unicode="&#xe801;" d="m46 144l0 0c0 0-1 0-1 0-8 18-15 37-21 55-6 19-11 38-15 58-19 99-8 203 35 298 3 6 10 8 15 5 1 0 2 0 2-1l0 0 80-59c5-3 6-9 4-14-5-12-9-25-12-38-4-12-7-26-9-39-11-67-3-137 23-201 2-5 0-10-4-13l0 0-80-56c-5-4-12-3-16 3-1 0-1 1-1 2l0 0z m120 574l0 0c0 1 0 1 0 1 15 13 30 25 46 37 16 11 33 22 51 31 89 50 192 72 297 60 6-1 10-6 10-13 0-1-1-1-1-2l0 0-31-94c-2-5-8-8-13-7-13 0-27 0-40 0-14-1-27-2-40-4-68-11-133-40-186-84-4-3-10-3-14 0l0 0-79 58c-5 3-6 11-2 16 0 0 1 1 2 1l0 0z m588 65l0 0c0 0 1 0 1 0 17-10 34-21 50-32 16-12 31-25 46-38 74-69 127-160 148-262 2-6-2-12-9-13-1 0-1 0-2 0l0 0-100 1c-5 0-10 4-11 9-3 13-8 26-12 38-5 12-10 25-17 36-31 61-78 113-137 150-5 3-6 8-5 13l0 0 31 92c2 6 9 9 15 7 1 0 2-1 2-1l0 0z m244-535l0 0c0 0 0 0 0 0-4-20-9-39-15-57-7-19-14-37-22-55-44-92-114-170-205-221-6-3-13-1-16 4 0 1-1 2-1 2l0 0-30 94c-2 6 1 12 6 14 11 7 22 15 32 23 11 9 21 18 30 27 49 48 84 109 101 176 2 5 6 8 11 8l0 0 98-1c6 0 11-5 11-11 0-1 0-2 0-3l0 0z m-438-395l0 0c0 0 0 0 0 0-20-2-40-3-60-3-20 0-40 1-59 4-102 12-198 54-276 125-5 4-5 11 0 16 0 0 1 1 1 1l0 0 81 58c5 3 12 2 16-2 10-8 20-16 32-23 11-7 22-14 34-20 62-31 131-45 200-41 6 0 10-3 12-8l0 0 29-92c2-6-1-12-7-14-1-1-2-1-3-1l0 0z" horiz-adv-x="1000" />
<glyph glyph-name="book" unicode="&#xe802;" d="m600 630l35 0 0-672-502 0c-74 0-133 52-133 128l0 581c0 41 34 75 75 75l465 0 0-576-407 0c-52 0-88-28-88-78l0-2c0-50 36-83 88-83l467 0 0 627z m-60-562l-402 0c-12 0-22 7-22 19 0 10 10 16 22 16l402 0 0-35z" horiz-adv-x="635" />
<glyph glyph-name="map" unicode="&#xe803;" d="m53-93q-23 0-38 16t-15 36l0 521q0 21 15 36l235 235q14 14 34 15t35-11l224-179 202 201q25 25 57 10t32-47l0-520q0-21-16-38l-234-233q-14-14-35-15t-35 11l-224 179-201-202q-15-15-36-15z m51 178q152 150 156 152l0 378-156-156 0-374z m215 149l202-162 0 389-208 165 0-389q1-1 3-2t3-1z m410 7l0 374q-153-151-156-154l0-376z" horiz-adv-x="834" />
<glyph glyph-name="unhappy" unicode="&#xe804;" d="m261 800c-60 0-109-65-109-144 0-80 49-145 109-145s110 65 110 145c0 79-49 144-110 144z m477 0c-61 0-110-65-110-144 0-80 49-145 110-145 60 0 110 65 110 145 0 79-50 144-110 144z m-244-599c-165 0-331-62-461-184l-3-3c-6-5-11-10-14-14-21-21-21-55-1-76 21-21 54-21 75-1l12 12 3 3c111 105 254 157 396 155 143-1 285-56 396-165 4-4 8-8 12-12 20-21 54-21 74-1 21 21 21 55 1 77-5 5-9 10-14 14-131 129-299 194-468 195-3 0-6 0-8 0z" horiz-adv-x="999" />
<glyph glyph-name="video" unicode="&#xe805;" d="m1000 654v-608q0-23-22-32-7-3-14-3-15 0-25 10l-225 225v-92q0-67-47-114t-113-47h-393q-67 0-114 47t-47 114v392q0 67 47 114t114 47h393q66 0 113-47t47-114v-92l225 225q10 10 25 10 7 0 14-3 22-9 22-32z" horiz-adv-x="1000" />
<glyph glyph-name="calendar" unicode="&#xe806;" d="m0-150l0 649 893 0 0-649-893 0z m0 705l0 221 109 0 0-141 200 0 0 141 275 0 0-141 199 0 0 141 110 0 0-221-893 0z m168 139l0 156 82 0 0-156-82 0z m59-619q0-112 123-112 47 0 84 32 39 31 39 80 0 68-78 90 48 15 64 48 12 28-2 73-27 62-107 62-51 0-86-26t-37-77l72 0q0 45 49 46 43 0 45-52 0-49-84-47l0-57q48 0 68-8 23-11 23-46 0-57-54-61-43 0-47 55l-72 0z m281 146q49 14 88 47l0-297 70 0 0 371-64 0q-38-37-94-58l0-63z m135 473l0 156 82 0 0-156-82 0z" horiz-adv-x="893" />
<glyph glyph-name="title" unicode="&#xe807;" d="m713 745v-41q0-16-10-34t-24-18q-28 0-30-1-15-3-18-17-2-6-2-36v-643q0-14-10-24t-24-10h-60q-14 0-24 10t-10 24v680h-80v-680q0-14-9-24t-25-10h-60q-14 0-24 10t-10 24v277q-82 7-137 33-70 33-107 100-36 65-36 145 0 92 50 159 49 66 116 89 62 21 233 21h267q14 0 24-10t10-24z" horiz-adv-x="714.3" />
<glyph glyph-name="media-add" unicode="&#xe808;" d="m573 350q21 0 36-15t16-37-16-36-36-15l-104 0 0-105q0-22-16-37t-37-15-36 15-15 37l0 105-105 0q-21 0-36 15t-15 36 15 37 36 15l105 0 0 104q0 21 15 37t36 16 37-16 16-37l0-104 104 0z m245 245q16-16 16-36l0-521q0-65-46-111t-110-46l-522 0q-65 0-110 46t-46 111l0 625q0 65 46 110t110 46l417 0q22 0 37-15z m-110-36l-135 134 0-56q0-32 23-55t55-23l57 0z m-30-574q21 0 36 16t15 37l0 469-78 0q-53 0-92 38t-38 92l0 78-365 0q-21 0-37-15t-15-37l0-625q0-21 15-37t37-16l522 0z" horiz-adv-x="834" />
<glyph glyph-name="folder-open" unicode="&#xe809;" d="m994 330q0 20-30 20h-607q-22 0-48-12t-39-29l-164-203q-11-13-11-22 0-20 30-20h607q22 0 48 13t40 29l164 203q10 12 10 21z m-637 91h429v90q0 22-16 38t-38 15h-321q-23 0-38 16t-16 38v36q0 22-15 38t-38 15h-179q-22 0-38-15t-16-38v-476l143 175q25 30 65 49t78 19z m708-91q0-34-25-66l-165-203q-24-30-65-49t-78-19h-607q-51 0-88 37t-37 88v536q0 51 37 88t88 37h179q51 0 88-37t37-88v-18h303q51 0 88-37t37-88v-90h107q30 0 56-13t37-40q8-17 8-38z" horiz-adv-x="1071.4" />
<glyph glyph-name="folder-closed" unicode="&#xe80a;" d="m857 118v393q0 22-15 38t-38 15h-393q-23 0-38 16t-16 38v36q0 22-15 38t-38 15h-179q-22 0-38-15t-16-38v-536q0-22 16-38t38-16h679q22 0 38 16t15 38z m72 393v-393q0-51-37-88t-88-37h-679q-51 0-88 37t-37 88v536q0 51 37 88t88 37h179q51 0 88-37t37-88v-18h375q51 0 88-37t37-88z" horiz-adv-x="928.6" />
<glyph glyph-name="cancel-circle" unicode="&#xe80b;" d="m1000 349q0-136-67-251t-182-182-251-67-251 67-182 182-67 251 67 251 182 182 251 67 251-67 182-182 67-251z m-339-232l71 71-161 161 161 161-71 71-161-161-161 161-71-71 161-161-161-161 71-71 161 161z" horiz-adv-x="1000" />
<glyph glyph-name="minus-circle" unicode="&#xe80c;" d="m1000 349q0-136-67-251t-182-182-251-67-251 67-182 182-67 251 67 251 182 182 251 67 251-67 182-182 67-251z m-794-58h589v118h-589v-118z" horiz-adv-x="1000" />
</font>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 6.2 KiB

View File

@ -1,188 +0,0 @@
body {
-webkit-animation: fadein 1s;
}
/* Safari, Chrome and Opera > 12.1 */
@-webkit-keyframes fadein {
from { opacity: 0; }
to { opacity: 1; }
}
*, body, div {
font-family: 'Lato', 'Helvetica';
font-weight: 300;
font-size: 1.1em;
color: #444;
}
body {
padding: 0;
margin: 0;
}
::-webkit-input-placeholder {
color: #ddd;
}
.titlebar {
height: 45px;
padding-top:10px;
text-align: center;
background-color: #eee;
border-bottom: solid 1.5px #aaa;
}
.titlebar a {
font-weight: 300;
font-size:.7em;
padding-top:5px;
}
.titlebar a.right {
float:right;
}
.titlebar a.left {
float:left;
}
.titlebar a.quit i {
color: #ff4136 !important;
}
.titlebar a.minus i {
color: #ffba00 !important;
}
.x-titlebar em {
padding-right: 28px;
background: url("../img/logo@18x22.png") no-repeat right 4px;
}
.content, .content-index {
padding: 0 10px 10px 10px;
font-size: 1em;
}
.content > div {
border-bottom: solid 1px #eee;
padding: 10px 0;
}
.content > p {
padding: 10px 0;
}
.content .import-success, .content .import-success a {
font-size:.9em;
}
.status {
display: none;
}
.status ul {
list-style-type:none;
padding:0;
margin:20px 0 0;
font-size:.8em;
}
.status ul li {
font-size:.9em;
padding:0;
margin:0 0 10px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.status ul li .destination {
width: 100%;
font-size: .75em;
padding-top: 0;
}
.preview {
position: relative;
padding: 20px 0;
}
.preview .center-cropped {
display: inline-block;
border-radius: 2px;
border: solid 1px #ddd;
margin: 1px;
width: 42px;
height: 42px;
background-position: center center;
background-size: cover;
}
.preview .center-cropped.video:before {
font-family: "elodie";
color: #eee;
content: '\e805';
position: absolute;
margin-top: 6px;
margin-left: 8px;
font-size: 1.5em;
}
i {
color: #555;
}
i.icon-happy {
color: #6cc644;
}
i.icon-unhappy {
color: #bd2c00;
}
label {
font-size: .9em;
font-weight: 300;
display: block;
padding-bottom:3px;
}
input, button {
font-family: 'Lato', 'Helvetica';
font-weight: 300;
font-size: .9em;
color: #666;
border: solid 1px #eee;
border-radius: 3px;
}
input:focus,
select:focus,
textarea:focus,
button:focus {
outline: none;
}
input {
padding: 4px;
width: 100%;
}
input[type="file"] {
height:0px;
width:0px;
overflow:hidden;
display:none;
}
button {
cursor: pointer;
background-color: #eee;
padding: 4px 10px;
margin-top: 10px;
}
small {
font-size:.7em;
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 550 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

View File

@ -1,37 +0,0 @@
<html>
<head>
<script src="js/handlers.js"></script>
<link href='https://fonts.googleapis.com/css?family=Lato:400,100,300,100italic,300italic' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="css/bootstrap.css"></script>
<link rel="stylesheet" href="css/boilerplate.css"></script>
<link rel="stylesheet" href="css/styles.css"></script>
<link rel="stylesheet" href="css/fontello/css/animation.css"></script>
<link rel="stylesheet" href="css/fontello/css/elodie.css"></script>
</head>
<body>
<div class="titlebar">
<!--<a href="" class="left quit quitProgram"><i class="icon-cancel-circle"></i></a>
<a href="" class="left minus minimizeProgram"><i class="icon-minus-circle"></i></a>-->
How can I help you? <em>-- Elodie</em><i></i>
</div>
<form class="importPhotos" action="" method="post">
<div id="content" class="content">
<p>
Let me know where your photos are and where you'd like me to put them as I sort them.
<small><em>(You can drop your photos here to update its information.)</em></small>
</p>
<div>
<label for="source"><i class="icon-folder-open"></i> What folder are your photos in now?</label>
<input type="text" name="source" id="source" placeholder="Path to your photos folder">
<small><em>Hint: Right clicking on your folder in Finder, pressing option and clicking "Copy as Pathname" will put the full path on your clipboard</em></small>
</div>
<div>
<label for="destination"><i class="icon-folder-closed"></i> Where would you like me to organize them to?</label>
<input type="text" name="destination" id="destination" placeholder="Path to your photos folder">
<button type="submit" class="push">Start Organizing<i></i></button>
</div>
<div class="import-success"></div>
</div>
</form>
</body>
</html>

View File

@ -1,250 +0,0 @@
var __constants__ = {
baseUrl : 'http://localhost:5000'
};
var __process__ = {};
if(typeof(require) === 'function') {
var ipc = require('ipc');
var path = require('path');
var os = require('os');
ipc.on('files', function(files) {
__process__.files = files;
});
ipc.on('preview', function(files) {
handlers.renderPreview(files);
});
ipc.on('update-import-success', function(args) {
//var response = JSON.parse(args['stdout']);
handlers.setSuccessTitle();
handlers.removeProgressIcons();
handlers.addSuccessImportMessage(args);
});
ipc.on('update-import-no-photos', function(args) {
//var response = JSON.parse(args['stdout']);
handlers.removeProgressIcons();
});
ipc.on('update-config-status', function(args) {
if(args) {
// @TODO: We should really handle this in the nodejs code.
handlers.removeProgressIcons();
location.href = 'index.html';
} else {
}
});
ipc.on('update-photos-success', function(args) {
if(os.platform() == 'win32'){
var response = JSON.parse(args['stdout'].replace(/\\/g, '\\\\'));
}else{
var response = JSON.parse(args['stdout']);
}
handlers.setSuccessTitle();
handlers.removeProgressIcons();
handlers.updateStatus(response);
});
function Broadcast() {
this.send = function(name, message) {
ipc.send(name, message);
};
}
window.onload = function () {
var broadcast = new Broadcast();
window.ondragover = function (e){ e.preventDefault(); return false };
window.ondragover = function (e){ e.preventDefault(); return false };
var holder = document.getElementById('content');
if(holder != null){
holder.ondrop = function (e) {
e.preventDefault();
files = []
for (var i = 0; i < e.dataTransfer.files.length; ++i) {
console.log(e.dataTransfer.files[i].path);
files.push(e.dataTransfer.files[i].path);
}
broadcast.send('load-update-photos', files);
return false;
};
}
};
};
function Handlers() {
var self = this;
var broadcast = new Broadcast();
this.click = {};
this.submit = {};
this.change = {};
// CHANGE
this.change.fileSelected = function(ev) {
var el = ev.target,
dir = el.value.substr(el.value.lastIndexOf("\\")+1),
tgt = document.querySelector(el.dataset.display);
tgt.innerHTML = dir;
};
// CLICK
this.click.selectFile = function(ev) {
var el = ev.target,
tgt = document.querySelector(el.dataset.for);
ev.preventDefault();
tgt.click();
};
this.click.launchFinder = function(ev) {
var el = ev.target,
tgt = el.dataset.path;
ev.preventDefault();
broadcast.send('launch-finder', tgt);
};
this.click.launchUrl = function(ev) {
var el = ev.target,
tgt = el.dataset.url;
ev.preventDefault();
broadcast.send('launch-url', tgt);
};
this.click.quitProgram = function(ev) {
//ev.preventDefault();
console.log('quit');
broadcast.send('program-quit');
};
// SUBMIT
this.submit.importPhotos = function(ev) {
var el = ev.target,
cls = el.className,
params;
ev.preventDefault();
params = {};
params['source'] = document.querySelector('input[name="source"]').value
params['destination'] = document.querySelector('input[name="destination"]').value
if(params['destination'].length === 0 || params['source'].length === 0) {
return;
}
document.querySelector('button.push i').className = 'icon-spin animate-spin';
broadcast.send('import-photos', params);
};
this.submit.updateConfig = function(ev) {
var el = ev.target,
cls = el.className,
params;
ev.preventDefault();
document.querySelector('button.push i').className = 'icon-spin animate-spin';
params = {};
params['mapQuestKey'] = document.querySelector('input[id="mapquest-api-key-field"]').value;
if(params['mapQuestKey'].length === 0) {
return;
}
broadcast.send('update-config', params);
};
this.submit.updatePhotos = function(ev) {
var el = ev.target,
cls = el.className,
params;
ev.preventDefault();
document.querySelector('button.push i').className = 'icon-spin animate-spin';
params = {};
params['location'] = document.querySelector('input[id="location-field"]').value;
params['datetime'] = document.querySelector('input[id="datetime-field"]').value;
params['album'] = document.querySelector('input[id="album-field"]').value;
params['title'] = document.querySelector('input[id="title-field"]').value;
if(params['location'].length === 0 && params['datetime'].length === 0 && params['album'].length === 0 && params['title'].length === 0) {
return;
}
params['files'] = __process__.files;
broadcast.send('update-photos', params);
};
this.addSuccessImportMessage = function(args) {
document.querySelector('.import-success').innerHTML = 'Your photos were successfully imported. <a href="#" class="launchFinder" data-path="'+args['destination'] +'">View them here</a>.';
};
this.dispatch = function(ev) {
var classes = ev.target.className.split(' ');
for(i=0; i<classes.length; i++) {
if(typeof(self[ev.type][classes[i]]) !== 'undefined') {
self[ev.type][classes[i]](ev);
}
}
};
this.removeProgressIcons = function() {
var els = document.querySelectorAll('i.icon-spin');
for(el in els) {
els[el].className = '';
}
};
this.renderPreview = function(files) {
html = '<label>You selected ' + (files.length > 1 ? 'these photos' : 'this photo') + '</label>';
for(var i=0; i<files.length && i<16; i++) {
if(files[i].match(/(mov|mp4|3gp|avi)/i) === null) {
html += '<div class="center-cropped" style="background-image:url(\'file://'+fileUrl(files[i])+'\');" title="'+files[i]+'"></div>';
} else {
html += '<div class="center-cropped video"></div>';
}
}
if(files.length >= 16) {
html += '<br>...and ' + (files.length -16) + ' more.';
}
document.querySelector('.preview').innerHTML = html;
};
this.setSuccessTitle = function() {
var el = document.querySelector('.titlebar i').className = 'icon-happy';
};
this.updateStatus = function(response) {
var el = document.querySelector('.status'),
source, destination, html;
console.log('update status');
console.log(response);;
if(response.length > 0) {
html = '<label>Status</label><ul>';
for(i=0; i<response.length; i++) {
source = response[i]['source'] || null;
destination = response[i]['destination'] || null;
sourceFileName = source.substr(source.lastIndexOf('/')+1);
if(destination === null) {
html += '<li><i class="icon-unhappy"></i> ' + sourceFileName + '</li>';
} else {
html += '<li><i class="icon-happy"></i> ' + sourceFileName + '<div class="destination" title="'+destination+'">'+destination+'</div></li>';
}
}
html += '</ul>';
el.innerHTML = html;
el.style.display = 'block';
}
};
function fileUrl(str) {
if (typeof str !== 'string') {
throw new Error('Expected a string');
}
var pathName = path.resolve(str).replace(/\\/g, '/');
// Windows drive letter must be prefixed with a slash
if (pathName[0] !== '/') {
pathName = '/' + pathName;
}
return encodeURI('file://' + pathName);
};
}
var handlers = new Handlers();
window.addEventListener('click', handlers.dispatch);
window.addEventListener('submit', handlers.dispatch);
window.addEventListener('change', handlers.dispatch);

View File

@ -1,71 +0,0 @@
<html>
<head>
<script src="js/handlers.js"></script>
<link href='https://fonts.googleapis.com/css?family=Lato:400,100,300,100italic,300italic' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="css/bootstrap.css"></script>
<link rel="stylesheet" href="css/boilerplate.css"></script>
<link rel="stylesheet" href="css/styles.css"></script>
<link rel="stylesheet" href="css/fontello/css/animation.css"></script>
<link rel="stylesheet" href="css/fontello/css/elodie.css"></script>
</head>
<body>
<div class="titlebar">
<div class="left">
<!--<a href="" class="left quit quitProgram"><i class="icon-cancel-circle"></i></a>
<a href="" class="left minus minimizeProgram"><i class="icon-minus-circle"></i></a>-->
</div>
How can I help you? <em>-- Elodie</em><i></i>
<a href="index.html" class="right"><i class="icon-media-add"></i></a>
</div>
<form class="updatePhotos" action="" method="post">
<div id="content" class="content">
<div class="location">
<label for="location-field"><i class="icon-map"></i>Change geolocation</label>
<input id="location-field" type="text" placeholder="i.e. Sunnyvale, CA">
</div>
<div class="datetime">
<label for="datetime-field"><i class="icon-calendar"></i>Change date and time</label>
<input id="datetime-field" type="text" placeholder="i.e. 2015-07-31">
</div>
<div class="title">
<label for="title-field"><i class="icon-title"></i>Change title</label>
<input id="title-field" type="text" placeholder="i.e. Elodie smiling at dinner">
</div>
<div class="album">
<label for="album-field"><i class="icon-book"></i>Create album</label>
<input id="album-field" type="text" placeholder="i.e. Elodie's Birthday Party">
<button class="push" type="submit">Update<i></i></button>
<!--<button class="push add-location updatePhotos">Update Photos<i></i></button>
<button class="push add-datetime updatePhotos">Update<i></i></button>
<button class="push add-title updatePhotos">Update<i></i></button>
<button class="push add-album updatePhotos">Update<i></i></button>-->
</div>
<div class="status">
<!--<ul>
<li>IMG_6365.JPG <i class="icon-happy"></i><div class="destination" title="/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg">/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg</div></li>
<li>IMG_1234.JPG <i class="icon-unhappy"></i><div class="destination" title="/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg">/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg</div></li>
</ul>-->
</div>
<div class="preview">
<!--<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped video"></div>
<div class="center-cropped video"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>-->
</div>
</div>
</form>
<script>
document.getElementById('location-field').focus();
</script>
</body>
</html>

View File

@ -1,19 +0,0 @@
var ipc = require('ipc'),
toolbarUi = require('./modules/toolbar-ui.js'),
broadcast = require('./modules/broadcast.js');
toolbarUi.app.on('ready', toolbarUi.ready);
toolbarUi.app.on('create-window', toolbarUi.createWindow);
toolbarUi.app.on('after-create-window', toolbarUi.afterCreateWindow);
toolbarUi.app.on('show', toolbarUi.show);
toolbarUi.app.on('after-show', toolbarUi.afterShow);
toolbarUi.app.on('hide', toolbarUi.show);
toolbarUi.app.on('after-hide', toolbarUi.afterHide);
ipc.on('import-photos', broadcast.importPhotos);
ipc.on('update-config', broadcast.updateConfig);
ipc.on('update-photos', broadcast.updatePhotos);
ipc.on('launch-finder', broadcast.launchFinder);
ipc.on('launch-url', broadcast.launchUrl);
ipc.on('program-quit', broadcast.programQuit);
ipc.on('load-update-photos', toolbarUi.onDropFiles);

View File

@ -1,123 +0,0 @@
var exports = module.exports = {};
var path = require('path');
var exec = require('child_process').exec,
config = require('./config.js');
// The main process listens for events from the web renderer.
// When photos are dragged onto the toolbar and photos are requested to be updated it will fire an 'update-photos' ipc event.
// The web renderer will send the list of photos, type of update and new value to apply
// Once this main process completes the update it will send a 'update-photos-completed' event back to the renderer with information
// so a proper response can be displayed.
exports.importPhotos = function(event, args) {
var params = args,
normalize;
console.log('import-photos');
console.log(args);
if(typeof(args['source']) === 'undefined' || args['source'].length === 0 || typeof(args['destination']) === 'undefined' || args['destination'].length === 0) {
console.log('no source or destination passed in to import-photos');
event.sender.send('update-import-no-photos', null);
return;
}
args['source'] = args['source'].normalize();
args['destination'] = args['destination'].normalize();
update_command = path.normalize(__dirname + '/../../dist/elodie/elodie') + ' import --source="' + args['source'] + '" --destination="' + args['destination'] + '"';
//update_command = __dirname + '/../../elodie.py import --source="' + args['source'] + '" --destination="' + args['destination'] + '"';
console.log(update_command);
exec(update_command, function(error, stdout, stderr) {
console.log('out ' + stdout);
console.log('err ' + stderr);
/*params['error'] = error
params['stdout'] = '[' + stdout.replace(/\n/g,',').replace(/\,+$/g, '').replace(/\n/g,'') + ']'
params['stderr'] = stderr
console.log('parsed')
console.log(params['stdout'])*/
event.sender.send('update-import-success', args);
});
};
exports.updateConfig = function(event, args) {
var params = args,
status;
status = config.writeConfig(params);
if(status) {
event.sender.send('update-config-status', true);
} else {
event.sender.send('update-config-status', false);
}
};
// When photos are dragged onto the toolbar and photos are requested to be updated it will fire an 'update-photos' ipc event.
// The web renderer will send the list of photos, type of update and new value to apply
// Once this main process completes the update it will send a 'update-photos-completed' event back to the renderer with information
// so a proper response can be displayed.
exports.updatePhotos = function(event, args) {
var params = args,
normalize;
console.log('update-photos');
console.log(args);
if(typeof(args['files']) === 'undefined' || args['files'].length === 0) {
console.log('no files passed in to update-photos');
return;
}
normalize = function(files) {
for(var i=0; i<files.length; i++) {
files[i] = files[i].normalize()
}
return files
}
files = normalize(args['files'])
elodie_path = path.normalize(__dirname + '/../../dist/elodie/elodie');
update_command = elodie_path +' update'
//update_command = __dirname + '/../../elodie.py update'
if(args['location'].length > 0) {
update_command += ' --location="' + args['location'] + '"';
}
if(args['album'].length > 0) {
update_command += ' --album="' + args['album'] + '"';
}
if(args['datetime'].length > 0) {
update_command += ' --time="' + args['datetime'] + '"';
}
if(args['title'].length > 0) {
update_command += ' --title="' + args['title'] + '"';
}
update_command += ' "' + files.join('" "') + '"'
console.log(update_command)
exec(update_command, function(error, stdout, stderr) {
console.log('out ' + stdout)
console.log('err ' + stderr)
params['error'] = error
params['stdout'] = '[' + stdout.replace(/\n/g,',').replace(/\,+$/g, '').replace(/\n/g,'') + ']'
params['stderr'] = stderr
console.log('parsed')
console.log(params['stdout'])
event.sender.send('update-photos-success', params);
});
};
exports.launchFinder = function(event, path) {
console.log(path);
var shell = require('shell');
shell.showItemInFolder(path);
};
exports.launchUrl = function(event, url) {
console.log(url);
var shell = require('shell');
shell.openExternal(url);
};
exports.programQuit = function(event, path) {
console.log('program-quit');
//mb.tray.destroy();
mb.quit();
};

View File

@ -1,38 +0,0 @@
var exports = module.exports = {};
var fs = require('fs'),
os = require('os'),
defaultConfigFile = (function() {
var f = __dirname;
for(var i=0; i<2; i++) {
f = f.substr(0, f.lastIndexOf(os.platform() == 'win32' ? '\\' : '/'));
}
return f + (os.platform() == 'win32' ? '\\config.ini-sample': '/config.ini-sample');
})(),
configFile = (process.env.HOME || process.env.USERPROFILE) + (os.platform() == 'win32' ? '\\.elodie\\config.ini' : '/.elodie/config.ini'),
hasConfig,
setConfig;
exports.hasConfig = function() {
console.log(defaultConfigFile);
console.log(configFile);
return fs.existsSync(configFile);
};
exports.writeConfig = function(params) {
var contents;
try {
if(exports.hasConfig()) {
contents = fs.readFileSync(configFile).toString();
} else {
contents = fs.readFileSync(defaultConfigFile).toString();
}
console.log(contents);
contents = contents.replace(/key=[\s\S]+$/,'key='+params['mapQuestKey']);
fs.writeFileSync(configFile, contents);
return true;
} catch(e) {
console.log(e);
return false;
}
};

View File

@ -1,111 +0,0 @@
var exports = module.exports = {};
var menubar = require('menubar'),
menu = require('menu'),
tray = require('tray'),
config = require('./config.js'),
loadUrl = null;
var os = require('os')
var s_dir = __dirname.substr(0,__dirname.lastIndexOf(os.platform() == 'win32' ? '\\' : '/')) +
(os.platform() == 'win32' ? '\\html' : '/html');
exports.app = app = menubar(
{
preloadWindow: true,
dir: s_dir,
index: 'index.html',
pages: {
'blank': 'blank.html',
'config': 'config.html',
'location': 'location.html'
},
width: 400,
height: 500,
'window-position': 'trayCenter',
'frame': os.platform() == 'win32' ? true : false,
'always-on-top': os.platform() == 'win32' ? true : false
}
);
exports.ready = function() {
console.log('app is ready');
var template = [{
label: "Application",
submenu: [
{ label: "Quit", accelerator: "Command+Q", click: function() { app.quit(); }}
]}, {
label: "Edit",
submenu: [
{ label: "Undo", accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: "Redo", accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ label: "Cut", accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: "Copy", accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: "Paste", accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: "Select All", accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]}
];
menu.setApplicationMenu(menu.buildFromTemplate(template));
this.tray.setToolTip('Drag and drop files here');
console.log(app.getOption('dir'));
this.tray.setImage(app.getOption('dir') + '/img/logo@18x22xbw.png');
this.tray.on('clicked', function clicked () {
console.log('tray-clicked')
});
this.tray.on('drop-files', function dropFiles (ev, files) {
loadUrl = app.getOption('pages')['location'];
app.showWindow();
//app.window.openDevTools();
app.window.webContents.on('did-finish-load', function() {
app.window.webContents.send('files', files);
app.window.webContents.send('preview', files);
});
});
};
exports.onDropFiles = function(event, args) {
var files = args;
loadUrl = app.getOption('pages')['location'];
app.showWindow();
app.window.webContents.on('did-finish-load', function() {
app.window.webContents.send('files', files);
app.window.webContents.send('preview', files);
});
};
exports.createWindow = function() {
console.log('create-window')
};
exports.afterCreateWindow = function() {
console.log('after-create-window')
};
exports.show = function() {
if(!config.hasConfig()) {
loadUrl = this.getOption('pages')['config'];
} else if(loadUrl === null) {
loadUrl = this.getOption('index');
}
this.window.loadUrl('file://' + this.getOption('dir') + '/' + loadUrl);
loadUrl = null;
//app.window.openDevTools();
};
exports.afterShow = function() {
console.log('after-show');
};
exports.hide = function() {
console.log('hide');
};
exports.afterHide = function() {
console.log('after-hide')
this.window.loadUrl('file://' + this.getOption('dir') + '/' + this.getOption('pages')['blank']);
};

View File

@ -1,17 +0,0 @@
[Path]
# day_begins: what hour of the day you want the day to begin (only for
# classification purposes). Defaults at 0 as midnight. Can be
# used to group early morning photos with the previous day. Must
# be a number between 0-23')
day_begins=4
dirs_path=%u{%Y-%m}/{city}|{city}-{%Y}/{folders[:1]}/{folder}
name={%Y-%m-%b-%H-%M-%S}-{basename}.%l{ext}
[Exclusions]
name1=.directory
name2=.DS_Store
[Geolocation]
geocoder=Nominatim
prefer_english_names=False

Binary file not shown.

Before

Width:  |  Height:  |  Size: 99 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

258
dozo.py
View File

@ -1,258 +0,0 @@
#!/usr/bin/env python
import os
import re
import sys
import logging
from datetime import datetime
import click
from send2trash import send2trash
from dozo import constants
from dozo import config
from dozo.filesystem import FileSystem
from dozo.database import Db
from dozo.media.media import Media, get_all_subclasses
from dozo.summary import Summary
FILESYSTEM = FileSystem()
def print_help(command):
click.echo(command.get_help(click.Context(sort)))
@click.command('batch')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
def _batch(debug):
"""Run batch() for all plugins.
"""
constants.debug = debug
plugins = Plugins()
plugins.run_batch()
def get_logger(verbose, debug):
if debug:
level = logging.DEBUG
elif verbose:
level = logging.INFO
else:
level = logging.WARNING
logging.basicConfig(format='%(levelname)s:%(message)s', level=level)
logging.debug('This message should appear on the console')
logging.info('So should this')
logging.getLogger('asyncio').setLevel(level)
logger = logging.getLogger('dozo')
logger.level = level
return logger
@click.command('sort')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
@click.option('--dry-run', default=False, is_flag=True,
help='Dry run only, no change made to the filesystem.')
@click.option('--destination', '-d', type=click.Path(file_okay=False),
default=None, help='Sort files into this directory.')
@click.option('--copy', '-c', default=False, is_flag=True,
help='True if you want files to be copied over from src_dir to\
dest_dir rather than moved')
@click.option('--exclude-regex', '-e', default=set(), multiple=True,
help='Regular expression for directories or files to exclude.')
@click.option('--filter-by-ext', '-f', default=set(), multiple=True, help='''Use filename
extension to filter files for sorting. If value is '*', use
common media file extension for filtering. Ignored files remain in
the same directory structure''' )
@click.option('--ignore-tags', '-i', default=set(), multiple=True,
help='Specific tags or group that will be ignored when\
searching for file data. Example \'File:FileModifyDate\' or \'Filename\'' )
@click.option('--max-deep', '-m', default=None,
help='Maximum level to proceed. Number from 0 to desired level.')
@click.option('--remove-duplicates', '-r', default=False, is_flag=True,
help='True to remove files that are exactly the same in name\
and a file hash')
@click.option('--reset-cache', '-R', default=False, is_flag=True,
help='Regenerate the hash.json and location.json database ')
@click.option('--verbose', '-v', default=False, is_flag=True,
help='True if you want to see details of file processing')
@click.argument('paths', required=True, nargs=-1, type=click.Path())
def _sort(debug, dry_run, destination, copy, exclude_regex, filter_by_ext, ignore_tags,
max_deep, remove_duplicates, reset_cache, verbose, paths):
"""Sort files or directories by reading their EXIF and organizing them
according to config.ini preferences.
"""
if copy:
mode = 'copy'
else:
mode = 'move'
logger = get_logger(verbose, debug)
if max_deep is not None:
max_deep = int(max_deep)
cache = True
if reset_cache:
cache = False
if not destination and paths:
destination = paths[-1]
paths = paths[0:-1]
else:
sys.exit(1)
paths = set(paths)
destination = os.path.abspath(os.path.expanduser(destination))
if not os.path.exists(destination):
logger.error(f'Directory {destination} does not exist')
conf = config.load_config(constants.CONFIG_FILE)
path_format = config.get_path_definition(conf)
# if no exclude list was passed in we check if there's a config
if len(exclude_regex) == 0:
if 'Exclusions' in conf:
exclude_regex = [value for key, value in conf.items('Exclusions')]
exclude_regex_list = set(exclude_regex)
# Initialize Db
db = Db(destination)
if 'Directory' in conf and 'day_begins' in conf['Directory']:
config_directory = conf['Directory']
day_begins = config_directory['day_begins']
else:
day_begins = 0
filesystem = FileSystem(cache, day_begins, dry_run, exclude_regex_list,
filter_by_ext, logger, max_deep, mode, path_format)
summary, has_errors = filesystem.sort_files(paths, destination, db,
remove_duplicates, ignore_tags)
if verbose or debug:
summary.write()
if has_errors:
sys.exit(1)
@click.command('generate-db')
@click.option('--path', type=click.Path(file_okay=False),
required=True, help='Path of your photo library.')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
def _generate_db(path, debug):
"""Regenerate the hash.json database which contains all of the sha256 signatures of media files. The hash.json file is located at ~/.dozo/.
"""
constants.debug = debug
result = Result()
path = os.path.abspath(os.path.expanduser(path))
if not os.path.isdir(path):
log.error('path is not a valid directory %s' % path)
sys.exit(1)
db = Db(path)
db.backup_hash_db()
db.reset_hash_db()
for current_file in FILESYSTEM.get_all_files(path):
result.append((current_file, True))
db.add_hash(db.checksum(current_file), current_file)
log.progress()
db.update_hash_db()
log.progress('', True)
result.write()
@click.command('verify')
@click.option('--path', type=click.Path(file_okay=False),
required=True, help='Path of your photo library.')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
def _verify(path, debug):
constants.debug = debug
result = Result()
db = Db(path)
for checksum, file_path in db.all():
if not os.path.isfile(file_path):
result.append((file_path, False))
log.progress('x')
continue
actual_checksum = db.checksum(file_path)
if checksum == actual_checksum:
result.append((file_path, True))
log.progress()
else:
result.append((file_path, False))
log.progress('x')
log.progress('', True)
result.write()
@click.command('compare')
@click.option('--debug', default=False, is_flag=True,
              help='Override the value in constants.py with True.')
@click.option('--dry-run', default=False, is_flag=True,
              help='Dry run only, no change made to the filesystem.')
@click.option('--find-duplicates', '-f', default=False, is_flag=True)
@click.option('--output-dir', '-o', default=False, is_flag=True, help='output\
dir')
@click.option('--remove-duplicates', '-r', default=False, is_flag=True)
@click.option('--revert-compare', '-R', default=False, is_flag=True, help='Revert\
compare')
@click.option('--similar-to', '-s', default=False, help='Similar to given\
image')
@click.option('--similarity', '-S', default=80, help='Similarity level for\
images')
@click.option('--verbose', '-v', default=False, is_flag=True,
              help='True if you want to see details of file processing')
@click.argument('path', nargs=1, required=True)
def _compare(debug, dry_run, find_duplicates, output_dir, remove_duplicates,
             revert_compare, similar_to, similarity, verbose, path):
    '''Compare files in directories'''
    # TODO: find_duplicates, output_dir, remove_duplicates and similar_to
    # are accepted but currently unused; they are kept for CLI compatibility.
    logger = get_logger(verbose, debug)
    # Initialize Db
    db = Db(path)
    filesystem = FileSystem(mode='move', dry_run=dry_run, logger=logger)
    if revert_compare:
        # FIX: revert_compare() takes (path, db); the dry-run behaviour is
        # already governed by the dry_run flag given to the FileSystem
        # constructor.  Passing dry_run as a third positional argument
        # raised a TypeError.
        summary, has_errors = filesystem.revert_compare(path, db)
    else:
        summary, has_errors = filesystem.sort_similar_images(path, db,
                                                             similarity)
    if verbose or debug:
        summary.write()
    if has_errors:
        sys.exit(1)
@click.group()
def main():
    # Root click command group; subcommands are registered below via
    # main.add_command().  Intentionally empty (no shared options).
    pass
# Attach every CLI subcommand to the root group.
main.add_command(_compare)
main.add_command(_sort)
main.add_command(_generate_db)
main.add_command(_verify)
main.add_command(_batch)

# Standard script entry point guard.
if __name__ == '__main__':
    main()

View File

View File

@ -1,47 +0,0 @@
"""Load config file as a singleton."""
from configparser import RawConfigParser
from os import path
from dozo import constants
def write(conf_file, config):
    """Write *config* (a ConfigParser-like object) to the file *conf_file*.

    :param str conf_file: Path of the configuration file to write.
    :param config: Object exposing ``write(fileobj)`` (e.g. RawConfigParser).
    :returns: True on success (an OSError propagates to the caller).
    """
    # FIX: the original shadowed the *conf_file* path with the file handle
    # and ended with an unreachable ``return False``.
    with open(conf_file, 'w') as f:
        config.write(f)
    return True
def load_config(file):
    """Parse *file* with RawConfigParser; return {} when it does not exist."""
    if not path.exists(file):
        return {}
    parser = RawConfigParser()
    parser.read(file)
    return parser
def get_path_definition(config):
    """Returns a list of folder definitions.

    Each element in the list represents a folder.
    Fallback folders are supported and are nested lists.

    :returns: string
    """
    if 'Path' in config:
        path_section = config['Path']
        if 'format' in path_section:
            return path_section['format']
        # FIX: the original condition ``'dirs_path' and 'name' in ...`` only
        # tested 'name' ('dirs_path' is a truthy literal), so a config with
        # 'name' but no 'dirs_path' raised KeyError.
        if 'dirs_path' in path_section and 'name' in path_section:
            return path_section['dirs_path'] + '/' + path_section['name']
    return constants.default_path + '/' + constants.default_name
def get_geocoder():
    """Return the geocoder named in [Geolocation]; fall back to the default.

    Only 'Nominatim' is recognised as a valid value.
    """
    config = load_config(constants.CONFIG_FILE)
    try:
        candidate = config['Geolocation']['geocoder']
    except KeyError:
        candidate = None
    if candidate in ('Nominatim', ):
        return candidate
    return constants.default_geocoder

View File

@ -1,35 +0,0 @@
"""
Settings.
"""
from os import environ, path
from sys import version_info
#: If True, debug messages will be printed.
debug = False
#: Directory in which to store Dozo settings.
application_directory = '{}/.dozo'.format(path.expanduser('~'))
default_path = '{%Y-%m-%b}/{album}|{city}|{"Unknown Location"}'
default_name = '{%Y-%m-%d_%H-%M-%S}-{name}-{title}.%l{ext}'
default_geocoder = 'Nominatim'
# Checksum storage file.
hash_db = 'hash.json'
# TODO will be removed eventualy later
# hash_db = '{}/hash.json'.format(application_directory)
# Geolocation details file.
location_db = 'location.json'
# TODO will be removed eventualy later
# location_db = '{}/location.json'.format(application_directory)
# Dozo installation directory.
script_directory = path.dirname(path.dirname(path.abspath(__file__)))
#: Accepted language in responses from MapQuest
accepted_language = 'en'
# check python version, required in filesystem.py to trigger appropriate method
python_version = version_info.major
CONFIG_FILE = '%s/config.ini' % application_directory

View File

@ -1,200 +0,0 @@
"""
Methods for interacting with information Dozo caches about stored media.
"""
from builtins import map
from builtins import object

import hashlib
import json
import os
import sys
from math import radians, cos, sqrt
from shutil import copyfile
from time import strftime

from dozo import constants
class Db(object):
    """A class for interacting with the JSON files created by Dozo."""

    def __init__(self, target_dir):
        """Open (creating if needed) the hash and location databases stored
        under ``<target_dir>/.dozo``.

        :param str target_dir: Root of the photo library.
        """
        # Create dir for target database
        dirname = os.path.join(target_dir, '.dozo')
        if not os.path.exists(dirname):
            try:
                os.makedirs(dirname)
            except OSError:
                # Raced with another process or lacking permission; the
                # open() below will surface a real failure.
                pass

        self.hash_db_file = os.path.join(dirname, constants.hash_db)
        self.check_db(self.hash_db_file)

        self.hash_db = {}
        # We know from above that this file exists so we open it
        # for reading only.
        with open(self.hash_db_file, 'r') as f:
            try:
                self.hash_db = json.load(f)
            except ValueError:
                # Empty or corrupt JSON: start from a blank database.
                pass

        self.location_db_file = os.path.join(dirname, constants.location_db)
        self.check_db(self.location_db_file)

        self.location_db = []
        # We know from above that this file exists so we open it
        # for reading only.
        with open(self.location_db_file, 'r') as f:
            try:
                self.location_db = json.load(f)
            except ValueError:
                pass

    def check_db(self, db_file):
        '''Create the db file if it does not exist yet.'''
        # If the hash db doesn't exist we create it.
        # Otherwise we only open for reading
        if not os.path.isfile(db_file):
            with open(db_file, 'a'):
                os.utime(db_file, None)

    def checksum(self, file_path, blocksize=65536):
        """Create a SHA256 hex digest for the given file.

        FIX: the CLI commands ``generate-db`` and ``verify`` call
        ``db.checksum(...)`` but this class previously had no such method.

        :param str file_path: Path to the file to hash.
        :param int blocksize: Read the file in chunks of this size.
        :returns: str
        """
        hasher = hashlib.sha256()
        with open(file_path, 'rb') as f:
            buf = f.read(blocksize)
            while buf:
                hasher.update(buf)
                buf = f.read(blocksize)
        return hasher.hexdigest()

    def add_hash(self, key, value, write=False):
        """Add a hash to the hash db.

        :param str key: Checksum of the file.
        :param str value: File path.
        :param bool write: If true, write the hash db to disk.
        """
        self.hash_db[key] = value
        if write:
            self.update_hash_db()

    # Location database
    # Currently quite simple just a list of long/lat pairs with a name
    # If it gets many entries a lookup might take too long and a better
    # structure might be needed. Some speed up ideas:
    # - Sort it and inter-half method can be used
    # - Use integer part of long or lat as key to get a lower search list
    # - Cache a small number of lookups, photos are likely to be taken in
    #   clusters around a spot during import.
    def add_location(self, latitude, longitude, place, write=False):
        """Add a location to the database.

        :param float latitude: Latitude of the location.
        :param float longitude: Longitude of the location.
        :param str place: Name for the location.
        :param bool write: If true, write the location db to disk.
        """
        data = {'lat': latitude, 'long': longitude, 'name': place}
        self.location_db.append(data)
        if write:
            self.update_location_db()

    def backup_hash_db(self):
        """Back up the hash db to a timestamp-suffixed copy.

        :returns: str path of the backup, or None when there is nothing
            to back up.
        """
        if os.path.isfile(self.hash_db_file):
            mask = strftime('%Y-%m-%d_%H-%M-%S')
            backup_file_name = '%s-%s' % (self.hash_db_file, mask)
            copyfile(self.hash_db_file, backup_file_name)
            return backup_file_name

    def check_hash(self, key):
        """Check whether a hash is present for the given key.

        :param str key:
        :returns: bool
        """
        return key in self.hash_db

    def get_hash(self, key):
        """Get the hash value for a given key.

        :param str key:
        :returns: str or None
        """
        return self.hash_db.get(key)

    def get_location_name(self, latitude, longitude, threshold_m):
        """Find a name for a location in the database.

        :param float latitude: Latitude of the location.
        :param float longitude: Longitude of the location.
        :param int threshold_m: Location in the database must be this close to
            the given latitude and longitude.
        :returns: str, or None if a matching location couldn't be found.
        """
        last_d = sys.maxsize
        name = None
        for data in self.location_db:
            # As threshold is quite small use simple math
            # From http://stackoverflow.com/questions/15736995/how-can-i-quickly-estimate-the-distance-between-two-latitude-longitude-points # noqa
            # convert decimal degrees to radians
            lon1, lat1, lon2, lat2 = list(map(
                radians,
                [longitude, latitude, data['long'], data['lat']]
            ))
            r = 6371000  # radius of the earth in m
            # Equirectangular approximation: good enough for small distances.
            x = (lon2 - lon1) * cos(0.5 * (lat2 + lat1))
            y = lat2 - lat1
            d = r * sqrt(x * x + y * y)
            # Keep the closest entry within the threshold.
            if d <= threshold_m and d < last_d:
                name = data['name']
                last_d = d
        return name

    def get_location_coordinates(self, name):
        """Get the latitude and longitude for a location.

        :param str name: Name of the location.
        :returns: tuple(float), or None if the location wasn't in the database.
        """
        for data in self.location_db:
            if data['name'] == name:
                return (data['lat'], data['long'])
        return None

    def all(self):
        """Generator to get all entries from self.hash_db.

        :returns: tuple(str checksum, str path)
        """
        yield from self.hash_db.items()

    def reset_hash_db(self):
        """Forget every in-memory hash entry (disk copy untouched)."""
        self.hash_db = {}

    def update_hash_db(self):
        """Write the hash db to disk."""
        with open(self.hash_db_file, 'w') as f:
            json.dump(self.hash_db, f)

    def update_location_db(self):
        """Write the location db to disk."""
        with open(self.location_db_file, 'w') as f:
            json.dump(self.location_db, f)

View File

@ -1,786 +0,0 @@
"""
General file system methods.
"""
from builtins import object
import filecmp
import hashlib
import logging
import os
import pathlib
import re
import sys
import shutil
import time
from datetime import datetime, timedelta
from dozo import constants
from dozo import geolocation
from dozo.media.media import get_media_class, get_all_subclasses
from dozo.media.photo import Photo
from dozo.summary import Summary
class FileSystem(object):
    """A class for interacting with the file system."""

    def __init__(self, cache=False, day_begins=0, dry_run=False,
                 exclude_regex_list=None, filter_by_ext=(),
                 logger=logging.getLogger(), max_deep=None,
                 mode='copy', path_format=None):
        """
        :param bool cache: Reuse cached geolocation lookups.
        :param int day_begins: Hour at which a day begins; photos taken
            earlier are filed with the previous day.
        :param bool dry_run: If True, simulate without touching the filesystem.
        :param exclude_regex_list: Iterable of regex strings; matching paths
            are skipped.  Default None means "no exclusions" (replaces the
            original shared-mutable ``set()`` default).
        :param tuple filter_by_ext: Extension whitelist; '%media' expands to
            every supported media extension.
        :param logger: Logger used for all reporting.
        :param max_deep: Maximum directory depth to walk (None = unlimited).
        :param str mode: 'copy' or 'move'.
        :param path_format: Destination path template; defaults to
            constants.default_path/constants.default_name.
        """
        self.cache = cache
        self.day_begins = day_begins
        self.dry_run = dry_run
        self.exclude_regex_list = (set() if exclude_regex_list is None
                                   else exclude_regex_list)
        self.filter_by_ext = filter_by_ext
        self.items = self.get_items()
        self.logger = logger
        self.max_deep = max_deep
        self.mode = mode
        # TODO have to be removed
        if path_format:
            self.path_format = path_format
        else:
            self.path_format = os.path.join(constants.default_path,
                                            constants.default_name)
        self.summary = Summary()
        self.whitespace_regex = '[ \t\n\r\f\v]+'

    def create_directory(self, directory_path):
        """Create a directory if it does not already exist.

        :param str directory_path: A fully qualified path of the directory
            to create.
        :returns: bool
        """
        try:
            if os.path.exists(directory_path):
                return True
            else:
                if not self.dry_run:
                    os.makedirs(directory_path)
                self.logger.info(f'Create {directory_path}')
                return True
        except OSError:
            # OSError is thrown for cases like no permission
            pass
        return False

    def get_items(self):
        """Map path-template item names to their placeholder patterns."""
        return {
            'album': '{album}',
            'basename': '{basename}',
            'camera_make': '{camera_make}',
            'camera_model': '{camera_model}',
            'city': '{city}',
            'custom': '{".*"}',
            'country': '{country}',
            # 'folder': '{folder[<>]?[-+]?[1-9]?}',
            'ext': '{ext}',
            'folder': '{folder}',
            'folders': r'{folders(\[[0-9:]{0,3}\])?}',
            'location': '{location}',
            'name': '{name}',
            'original_name': '{original_name}',
            'state': '{state}',
            'title': '{title}',
            'date': '{(%[a-zA-Z][^a-zA-Z]*){1,8}}'  # search for date format string
        }

    def walklevel(self, src_path, maxlevel=None):
        """
        Walk into input directory recursively until desired maxlevel.

        source: https://stackoverflow.com/questions/229186/os-walk-without-digging-into-directories-below

        :yields: (root, dirs, files, level)
        """
        src_path = src_path.rstrip(os.path.sep)
        if not os.path.isdir(src_path):
            return None
        num_sep = src_path.count(os.path.sep)
        for root, dirs, files in os.walk(src_path):
            level = root.count(os.path.sep) - num_sep
            yield root, dirs, files, level
            if maxlevel is not None and level >= maxlevel:
                # Pruning dirs in place stops os.walk from descending.
                del dirs[:]

    def get_all_files(self, path, extensions=False, exclude_regex_list=None):
        """Recursively get all files which match a path and extension.

        :param str path: Path to start recursive file listing.
        :param tuple(str) extensions: File extensions to include (whitelist).
        :param exclude_regex_list: Regex strings; matching paths are skipped.
        :returns: generator
        """
        if exclude_regex_list is None:
            # Avoid a shared mutable default argument.
            exclude_regex_list = set()
        if self.filter_by_ext != () and not extensions:
            # Filtering files by extensions.
            if '%media' in self.filter_by_ext:
                extensions = set()
                subclasses = get_all_subclasses()
                for cls in subclasses:
                    extensions.update(cls.extensions)
            else:
                extensions = self.filter_by_ext

        # Create a list of compiled regular expressions to match against the file path
        compiled_regex_list = [re.compile(regex) for regex in exclude_regex_list]
        for dirname, dirnames, filenames in os.walk(path):
            if dirname == os.path.join(path, '.dozo'):
                continue
            for filename in filenames:
                # Keep the file when its extension is whitelisted (or there is
                # no whitelist) AND it matches no exclusion regex.
                # FIX: the original precedence (``A or B and C``) made the
                # exclusion regexes inoperative whenever extensions == False.
                filename_path = os.path.join(dirname, filename)
                if (
                    (extensions == False
                     or os.path.splitext(filename)[1][1:].lower() in extensions)
                    and not self.should_exclude(filename_path,
                                                compiled_regex_list, False)
                ):
                    yield filename_path

    def check_for_early_morning_photos(self, date):
        """check for early hour photos to be grouped with previous day"""
        if date.hour < self.day_begins:
            self.logger.info('moving this photo to the previous day for '
                             'classification purposes (day_begins='
                             + str(self.day_begins) + ')')
            # push it to the day before for classificiation purposes
            date = date - timedelta(hours=date.hour + 1)
        return date

    def get_location_part(self, mask, part, place_name):
        """Takes a mask for a location and interpolates the actual place names.

        Given these parameters here are the outputs.

        mask = 'city'
        part = 'city-random'
        place_name = {'city': u'Sunnyvale'}
        return 'Sunnyvale'

        mask = 'location'
        part = 'location'
        place_name = {'default': u'Sunnyvale', 'city': u'Sunnyvale'}
        return 'Sunnyvale'

        :returns: str
        """
        folder_name = part
        if mask in place_name:
            replace_target = mask
            replace_with = place_name[mask]
        else:
            # Unknown component: strip the placeholder entirely.
            replace_target = part
            replace_with = ''
        folder_name = folder_name.replace(
            replace_target,
            replace_with,
        )
        return folder_name

    def get_part(self, item, mask, metadata, db, subdirs):
        """Parse a specific folder's name given a mask and metadata.

        :param item: Name of the item as defined in the path (i.e. date from %date)
        :param mask: Mask representing the template for the path (i.e. %city %state
        :param metadata: Metadata dictionary.
        :returns: str
        """
        # Each item has its own custom logic and we evaluate a single item and return
        # the evaluated string.
        part = ''
        if item == 'basename':
            part = os.path.basename(metadata['base_name'])
        elif item == 'name':
            # Remove date prefix added to the name.
            part = metadata['base_name']
            for i, rx in self.match_date_from_string(metadata['base_name']):
                part = re.sub(rx, '', part)
        elif item == 'date':
            date = self.get_date_taken(metadata)
            # early morning photos can be grouped with previous day
            date = self.check_for_early_morning_photos(date)
            if date is not None:
                part = date.strftime(mask)
        elif item in ('location', 'city', 'state', 'country'):
            place_name = geolocation.place_name(
                metadata['latitude'],
                metadata['longitude'],
                db,
                self.cache,
                self.logger
            )
            if item == 'location':
                mask = 'default'
            part = self.get_location_part(mask, item, place_name)
        elif item == 'folder':
            part = os.path.basename(subdirs)
        elif item == 'folders':
            folders = pathlib.Path(subdirs).parts
            # NOTE: eval() executes the slice expression embedded in the
            # path format (e.g. 'folders[0:2]'); only trusted configuration
            # should ever supply path_format.
            folders = eval(mask)
            part = os.path.join(*folders)
        elif item in ('album', 'camera_make', 'camera_model', 'ext',
                      'title'):
            if metadata[item]:
                part = metadata[item]
        elif item == 'original_name':
            # First we check if we have metadata['original_name'].
            # We have to do this for backwards compatibility because
            # we original did not store this back into EXIF.
            if metadata[item]:
                part = metadata['original_name']
        elif item in 'custom':
            # Fallback string
            part = mask[1:-1]
        return part

    def get_path(self, metadata, db, subdirs='', whitespace_sub='_'):
        """path_format: {%Y-%d-%m}/%u{city}/{album}

        Returns file path.

        :returns: string
        """
        path_format = self.path_format
        path = []
        path_parts = path_format.split('/')
        for path_part in path_parts:
            this_parts = path_part.split('|')
            for this_part in this_parts:
                for item, regex in self.items.items():
                    matched = re.search(regex, this_part)
                    if matched:
                        part = self.get_part(item, matched.group()[1:-1],
                                             metadata, db, subdirs)
                        part = part.strip()
                        # Capitalization
                        u_regex = '%u' + regex
                        l_regex = '%l' + regex
                        if re.search(u_regex, this_part):
                            this_part = re.sub(u_regex, part.upper(), this_part)
                        elif re.search(l_regex, this_part):
                            this_part = re.sub(l_regex, part.lower(), this_part)
                        else:
                            this_part = re.sub(regex, part, this_part)
                if this_part:
                    # Check if all masks are substituted
                    if True in [c in this_part for c in '{}']:
                        self.logger.error(
                            f'Format path part invalid: {this_part}')
                        sys.exit(1)
                    path.append(this_part.strip())
                    # We break as soon as we have a value to append
                    break
                # Else we continue for fallbacks
        if len(path[-1]) == 0:
            path[-1] = metadata['base_name']
        path_string = os.path.join(*path)
        if whitespace_sub != ' ':
            # Lastly we want to sanitize the name
            path_string = re.sub(self.whitespace_regex, whitespace_sub,
                                 path_string)
        return path_string

    def match_date_from_string(self, string, user_regex=None):
        """Yield (key, regex) pairs used to search for a date in *string*.

        FIX: when *user_regex* was supplied the original computed its matches
        and then yielded nothing, so the user regex was silently ignored.
        """
        if user_regex is not None:
            yield 'u', user_regex
        else:
            regex = {
                # regex to match date format type %Y%m%d, %y%m%d, %d%m%Y,
                # etc...
                'a': re.compile(
                    r'.*[_-]?(?P<year>\d{4})[_-]?(?P<month>\d{2})[_-]?(?P<day>\d{2})[_-]?(?P<hour>\d{2})[_-]?(?P<minute>\d{2})[_-]?(?P<second>\d{2})'),
                'b': re.compile(
                    r'[-_./](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_./]'),
                # not very accurate
                'c': re.compile(
                    r'[-_./](?P<year>\d{2})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_./]'),
                'd': re.compile(
                    r'[-_./](?P<day>\d{2})[-_.](?P<month>\d{2})[-_.](?P<year>\d{4})[-_./]')
            }
            for i, rx in regex.items():
                yield i, rx

    def get_date_from_string(self, string, user_regex=None):
        """Extract a datetime from *string* (typically a filename).

        :returns: datetime or None
        """
        # If missing datetime from EXIF data check if filename is in datetime format.
        # For this use a user provided regex if possible.
        # Otherwise assume a filename such as IMG_20160915_123456.jpg as default.
        matches = []
        for i, rx in self.match_date_from_string(string, user_regex):
            match = re.findall(rx, string)
            if match != []:
                if i == 'c':
                    # Two-digit year: assume 20xx.
                    match = [('20' + match[0][0], match[0][1], match[0][2])]
                elif i == 'd':
                    # Day-first layout: reorder items to (year, month, day).
                    match = [(match[0][2], match[0][1], match[0][0])]
                if len(match) != 1:
                    # The time string is not uniq
                    continue
                matches.append((match[0], rx))
                # We want only the first match for the moment
                break
        # check if there is only one result
        if len(set(matches)) == 1:
            try:
                # Convert str to int
                date_object = tuple(map(int, matches[0][0]))
                return datetime(*date_object)
            except (KeyError, ValueError):
                return None
        return None

    def get_date_taken(self, metadata):
        '''
        Get the date taken from metadata or filename
        :returns: datetime or None.
        '''
        if metadata is None:
            return None

        basename = metadata['base_name']
        date_original = metadata['date_original']
        if metadata['original_name'] is not None:
            date_filename = self.get_date_from_string(metadata['original_name'])
        else:
            date_filename = self.get_date_from_string(basename)
        date_created = metadata['date_created']
        # Precedence: EXIF original date, then filename date, then creation
        # date, then modification date.
        if date_original is not None:
            if date_filename is not None and date_filename != date_original:
                self.logger.warning(
                    f"{basename} time mark is different from {date_original}")
                # TODO ask for keep date taken, filename time, or neither
            return date_original
        if date_filename is not None:
            if date_created is not None and date_filename > date_created:
                self.logger.warning(
                    f"{basename} time mark is more recent than {date_created}")
            return date_filename
        # TODO warn and ask for confirmation
        if date_created is not None:
            return date_created
        elif metadata['date_modified'] is not None:
            return metadata['date_modified']

    def checksum(self, file_path, blocksize=65536):
        """Create a hash value for the given file.

        See http://stackoverflow.com/a/3431835/1318758.

        :param str file_path: Path to the file to create a hash for.
        :param int blocksize: Read blocks of this size from the file when
            creating the hash.
        :returns: str
        """
        hasher = hashlib.sha256()
        with open(file_path, 'rb') as f:
            buf = f.read(blocksize)
            while len(buf) > 0:
                hasher.update(buf)
                buf = f.read(blocksize)
        return hasher.hexdigest()

    def checkcomp(self, dest_path, src_checksum):
        """Compare *src_checksum* with the checksum of *dest_path*.

        :returns: the source checksum on success, False on mismatch.  In
            dry-run mode the copy never happened, so the source checksum is
            returned unconditionally.
        """
        if self.dry_run:
            return src_checksum

        dest_checksum = self.checksum(dest_path)
        if dest_checksum != src_checksum:
            self.logger.info(
                'Source checksum and destination checksum are not the same')
            return False
        return src_checksum

    def sort_file(self, src_path, dest_path, remove_duplicates=True):
        '''Copy or move file to dest_path.

        :returns: True when the file ended up at dest_path (or was a
            duplicate that got removed), False on name collision with a
            different file.
        '''
        mode = self.mode
        dry_run = self.dry_run

        # check for collisions
        if src_path == dest_path:
            self.logger.info(f'File {dest_path} already sorted')
            return True
        if os.path.isfile(dest_path):
            self.logger.info(f'File {dest_path} already exist')
            if remove_duplicates:
                if filecmp.cmp(src_path, dest_path):
                    self.logger.info('File in source and destination are identical. Duplicate will be ignored.')
                    if mode == 'move':
                        if not dry_run:
                            os.remove(src_path)
                        self.logger.info(f'remove: {src_path}')
                    return True
                else:  # name is same, but file is different
                    self.logger.info('File in source and destination are different.')
                    return False
            else:
                return False
        else:
            if mode == 'move':
                if not dry_run:
                    # Move the processed file into the destination directory
                    shutil.move(src_path, dest_path)
                self.logger.info(f'move: {src_path} -> {dest_path}')
            elif mode == 'copy':
                if not dry_run:
                    shutil.copy2(src_path, dest_path)
                self.logger.info(f'copy: {src_path} -> {dest_path}')
            return True

        return False

    def check_file(self, src_path, dest_path, src_checksum, db):
        """Verify the copied file and record its checksum in the db.

        :returns: (Summary, bool has_errors)
        """
        # Check if file remain the same
        checksum = self.checkcomp(dest_path, src_checksum)
        has_errors = False
        if checksum:
            if not self.dry_run:
                db.add_hash(checksum, dest_path)
                db.update_hash_db()

            if dest_path:
                self.logger.info(f'{src_path} -> {dest_path}')

            self.summary.append((src_path, dest_path))
        else:
            self.logger.error(f'Files {src_path} and {dest_path} are not identical')
            # sys.exit(1)
            self.summary.append((src_path, False))
            has_errors = True
        return self.summary, has_errors

    def get_files_in_path(self, path, extensions=False):
        """Recursively get files which match a path and extension.

        :param str path: Path to start recursive file listing.
        :param tuple(str) extensions: File extensions to include (whitelist).
        :returns: set of (file_path, subdirs)
        """
        if self.filter_by_ext != () and not extensions:
            # Filtering files by extensions.
            if '%media' in self.filter_by_ext:
                extensions = set()
                subclasses = get_all_subclasses()
                for cls in subclasses:
                    extensions.update(cls.extensions)
            else:
                extensions = self.filter_by_ext

        file_list = set()
        if os.path.isfile(path):
            if not self.should_exclude(path, self.exclude_regex_list, True):
                file_list.add((path, ''))

        # Create a list of compiled regular expressions to match against the file path
        compiled_regex_list = [re.compile(regex)
                               for regex in self.exclude_regex_list]

        subdirs = ''
        for dirname, dirnames, filenames, level in self.walklevel(path,
                                                                  self.max_deep):
            if dirname == os.path.join(path, '.dozo'):
                continue
            subdirs = os.path.join(subdirs, os.path.basename(dirname))

            for filename in filenames:
                # Keep the file when its extension is whitelisted (or there is
                # no whitelist) AND it matches no exclusion regex.
                # FIX: same precedence fix as in get_all_files().
                filename_path = os.path.join(dirname, filename)
                if (
                    (extensions == False
                     or os.path.splitext(filename)[1][1:].lower() in extensions)
                    and not self.should_exclude(filename_path,
                                                compiled_regex_list, False)
                ):
                    file_list.add((filename_path, subdirs))

        return file_list

    def sort_files(self, paths, destination, db, remove_duplicates=False,
                   ignore_tags=None):
        """
        Sort files into appropriate folder.

        :returns: (Summary, bool has_errors)
        """
        if ignore_tags is None:
            # Avoid a shared mutable default argument.
            ignore_tags = set()
        has_errors = False
        for path in paths:
            # some error checking
            if not os.path.exists(path):
                self.logger.error(f'Directory {path} does not exist')

            path = os.path.expanduser(path)

            conflict_file_list = set()
            for src_path, subdirs in self.get_files_in_path(path):
                # Process files
                src_checksum = self.checksum(src_path)
                media = get_media_class(src_path, ignore_tags, self.logger)
                if media:
                    metadata = media.get_metadata()
                    # Get the destination path according to metadata
                    file_path = self.get_path(metadata, db, subdirs=subdirs)
                else:
                    # Keep same directory structure
                    file_path = os.path.relpath(src_path, path)

                dest_directory = os.path.join(destination,
                                              os.path.dirname(file_path))
                dest_path = os.path.join(destination, file_path)
                self.create_directory(dest_directory)
                result = self.sort_file(src_path, dest_path, remove_duplicates)
                if result:
                    self.summary, has_errors = self.check_file(
                        src_path, dest_path, src_checksum, db)
                else:
                    # There is conflict files
                    conflict_file_list.add((src_path, dest_path))

            # FIX: iterate over a snapshot — the original removed entries
            # from the set while iterating it, raising RuntimeError.
            for src_path, dest_path in list(conflict_file_list):
                # FIX: recompute the checksum; the original reused the stale
                # src_checksum left over from the previous loop.
                src_checksum = self.checksum(src_path)
                # Try to sort the file
                result = self.sort_file(src_path, dest_path, remove_duplicates)
                if result:
                    conflict_file_list.remove((src_path, dest_path))
                else:
                    n = 1
                    while not result:
                        # Add appendix to the name
                        pre, ext = os.path.splitext(dest_path)
                        dest_path = pre + '_' + str(n) + ext
                        result = self.sort_file(src_path, dest_path,
                                                remove_duplicates)
                        # FIX: n was never incremented, so the 100-attempt
                        # guard could never trigger.
                        n += 1
                        if n > 100:
                            self.logger.error(
                                f'{self.mode}: to many append for {dest_path}...')
                            break
                        self.logger.info(
                            f'Same name already exists...renaming to: {dest_path}')

                if result:
                    self.summary, has_errors = self.check_file(
                        src_path, dest_path, src_checksum, db)
                else:
                    self.summary.append((src_path, False))
                    has_errors = True

        return self.summary, has_errors

    def check_path(self, path):
        """Expand *path* and abort when it does not exist.

        :returns: str absolute path
        """
        path = os.path.abspath(os.path.expanduser(path))

        # some error checking
        if not os.path.exists(path):
            self.logger.error(f'Directory {path} does not exist')
            sys.exit(1)

        return path

    def set_hash(self, result, src_path, dest_path, src_checksum, db):
        """Verify *dest_path* against *src_checksum* and record it in the db.

        :param result: Outcome of the preceding file operation.
        :returns: bool has_errors (True when verification failed).
        """
        if result:
            # Check if file remain the same
            result = self.checkcomp(dest_path, src_checksum)
            has_errors = False
            if result:
                if not self.dry_run:
                    # FIX: the original referenced an undefined name
                    # 'checksum' here (NameError); 'result' holds the
                    # verified checksum returned by checkcomp().
                    db.add_hash(result, dest_path)
                    db.update_hash_db()

                if dest_path:
                    self.logger.info(f'{src_path} -> {dest_path}')

                self.summary.append((src_path, dest_path))
            else:
                self.logger.error(f'Files {src_path} and {dest_path} are not identical')
                # sys.exit(1)
                self.summary.append((src_path, False))
                has_errors = True
        else:
            self.summary.append((src_path, False))
            has_errors = True

        return has_errors

    def move_file(self, img_path, dest_path, checksum, db):
        """Move *img_path* to *dest_path* and record its checksum.

        :returns: bool — True on success.
        """
        if not self.dry_run:
            try:
                shutil.move(img_path, dest_path)
            except OSError as error:
                self.logger.error(error)

        self.logger.info(f'move: {img_path} -> {dest_path}')
        # FIX: set_hash() returns a has_errors flag; invert it so callers
        # get a success boolean (the original returned the error flag
        # directly, inverting every success test downstream).
        return not self.set_hash(True, img_path, dest_path, checksum, db)

    def sort_similar_images(self, path, db, similarity=80):
        """Group visually similar images into 'similar_to_<name>' folders.

        :returns: (Summary, bool has_errors)
        """
        has_errors = False
        path = self.check_path(path)
        for dirname, dirnames, filenames, level in self.walklevel(path, None):
            if dirname == os.path.join(path, '.dozo'):
                continue
            if dirname.find('similar_to') == 0:
                continue
            file_paths = set()
            for filename in filenames:
                file_paths.add(os.path.join(dirname, filename))

            photo = Photo(logger=self.logger)
            images = set([i for i in photo.get_images(file_paths)])
            for image in images:
                if not os.path.isfile(image):
                    continue
                checksum1 = self.checksum(image)
                # Process files
                # media = get_media_class(src_path, False, self.logger)
                # TODO compare metadata
                # if media:
                #     metadata = media.get_metadata()
                similar = False
                moved_imgs = set()
                for img_path in photo.find_similar(image, file_paths,
                                                   similarity):
                    similar = True
                    checksum2 = self.checksum(img_path)
                    # move image into directory
                    name = os.path.splitext(os.path.basename(image))[0]
                    directory_name = 'similar_to_' + name
                    dest_directory = os.path.join(os.path.dirname(img_path),
                                                  directory_name)
                    dest_path = os.path.join(dest_directory,
                                             os.path.basename(img_path))
                    result = self.create_directory(dest_directory)
                    # Move the similar file into the destination directory
                    if result:
                        result = self.move_file(img_path, dest_path,
                                                checksum2, db)
                        moved_imgs.add(img_path)
                        if not result:
                            has_errors = True
                    else:
                        has_errors = True

                if similar:
                    # Finally move the reference image alongside its group.
                    dest_path = os.path.join(dest_directory,
                                             os.path.basename(image))
                    result = self.move_file(image, dest_path, checksum1, db)
                    moved_imgs.add(image)
                    if not result:
                        has_errors = True

        return self.summary, has_errors

    def revert_compare(self, path, db, dry_run=None):
        """Undo sort_similar_images(): move files out of 'similar_to_*' dirs.

        :param dry_run: Accepted for backward compatibility with callers
            that passed it positionally; the instance-level self.dry_run
            governs behaviour.
        :returns: (Summary, bool has_errors)
        """
        has_errors = False
        path = self.check_path(path)
        for dirname, dirnames, filenames, level in self.walklevel(path, None):
            if dirname == os.path.join(path, '.dozo'):
                continue
            if dirname.find('similar_to') == 0:
                continue
            for subdir in dirnames:
                if subdir.find('similar_to') == 0:
                    file_names = os.listdir(os.path.abspath(
                        os.path.join(dirname, subdir)))
                    for file_name in file_names:
                        # move file to initial folder
                        img_path = os.path.join(dirname, subdir, file_name)
                        if os.path.isdir(img_path):
                            continue
                        checksum = self.checksum(img_path)
                        dest_path = os.path.join(dirname,
                                                 os.path.basename(img_path))
                        result = self.move_file(img_path, dest_path,
                                                checksum, db)
                        if not result:
                            has_errors = True
                    # remove directory
                    try:
                        os.rmdir(os.path.join(dirname, subdir))
                    except OSError as error:
                        self.logger.error(error)

        return self.summary, has_errors

    def set_utime_from_metadata(self, date_taken, file_path):
        """ Set the modification time on the file based on the file name.
        """
        # Initialize date taken to what's returned from the metadata function.
        os.utime(file_path,
                 (int(datetime.now().timestamp()),
                  int(date_taken.timestamp())))

    def should_exclude(self, path, regex_list=set(), needs_compiled=False):
        """Return True when *path* matches any regex in *regex_list*.

        :param needs_compiled: True when regex_list holds pattern strings
            that must be compiled first.
        """
        if len(regex_list) == 0:
            return False

        if needs_compiled:
            compiled_list = []
            for regex in regex_list:
                compiled_list.append(re.compile(regex))
            regex_list = compiled_list

        return any(regex.search(path) for regex in regex_list)

View File

@ -1,157 +0,0 @@
"""Look up geolocation information for media objects."""
from past.utils import old_div
from os import path
import geopy
from geopy.geocoders import Nominatim
import logging
from dozo import constants
from dozo.config import load_config, get_geocoder
__KEY__ = None
__DEFAULT_LOCATION__ = 'Unknown Location'
__PREFER_ENGLISH_NAMES__ = None
def coordinates_by_name(name, db):
    """Resolve a place *name* to coordinates, cache first, then geocoder.

    :returns: dict with 'latitude'/'longitude' keys, or None.
    """
    # Try to get cached location first
    cached = db.get_location_coordinates(name)
    if cached is not None:
        return {
            'latitude': cached[0],
            'longitude': cached[1]
        }

    # If the name is not cached then we go ahead with an API lookup
    geocoder = get_geocoder()
    if geocoder != 'Nominatim':
        raise NameError(geocoder)

    locator = Nominatim(user_agent='myGeocoder')
    located = locator.geocode(name)
    if located is not None:
        return {
            'latitude': located.latitude,
            'longitude': located.longitude
        }
    return None
def decimal_to_dms(decimal):
    """Convert a decimal coordinate to (degrees, minutes, seconds, sign).

    The magnitude is split into DMS components; the sign of the input is
    returned separately as +1 or -1.

    :param decimal: Coordinate value (anything float() accepts).
    :returns: tuple(float degrees, float minutes, float seconds, int sign)
    """
    decimal = float(decimal)
    # Split the absolute value into minutes/seconds, then degrees/minutes.
    minutes, seconds = divmod(abs(decimal) * 3600, 60)
    degrees, minutes = divmod(minutes, 60)
    # FIX: removed the dead assignment ``degrees = degrees``.
    sign = 1 if decimal >= 0 else -1
    return (degrees, minutes, seconds, sign)
def dms_to_decimal(degrees, minutes, seconds, direction=' '):
    """Convert degrees/minutes/seconds into a signed decimal coordinate.

    :param direction: Compass letter; 'W' or 'S' (any case) negates the
        result, anything else leaves it positive.
    :returns: float
    """
    sign = -1 if direction[0] in 'WSws' else 1
    # NOTE: past.utils.old_div performs plain true division for float
    # operands, so the stdlib '/' operator is equivalent and removes the
    # python2-compat dependency.
    return (float(degrees) + float(minutes) / 60 +
            float(seconds) / 3600) * sign
def dms_string(decimal, type='latitude'):
    """Format a decimal coordinate for display.

    Example string -> 38 deg 14' 27.82" S

    :param type: 'latitude' or 'longitude'.
    :raises ValueError: for any other *type* (the original crashed with an
        UnboundLocalError on ``direction`` instead).
    :returns: str
    """
    dms = decimal_to_dms(decimal)
    if type == 'latitude':
        direction = 'N' if decimal >= 0 else 'S'
    elif type == 'longitude':
        direction = 'E' if decimal >= 0 else 'W'
    else:
        raise ValueError("type must be 'latitude' or 'longitude', got %r" % type)
    return '{} deg {}\' {}" {}'.format(dms[0], dms[1], dms[2], direction)
def get_prefer_english_names():
    """Return (and cache) the 'prefer_english_names' boolean from config.

    FIX: the original raised KeyError when the [Geolocation] section was
    absent, and used ``bool(str_value)`` which is True for any non-empty
    string — including 'False'.

    :returns: bool
    """
    global __PREFER_ENGLISH_NAMES__
    if __PREFER_ENGLISH_NAMES__ is not None:
        return __PREFER_ENGLISH_NAMES__

    config = load_config(constants.CONFIG_FILE)
    try:
        raw = config['Geolocation']['prefer_english_names']
    except KeyError:
        return False

    # Parse the config string as a boolean instead of relying on truthiness.
    __PREFER_ENGLISH_NAMES__ = str(raw).strip().lower() in ('1', 'yes',
                                                            'true', 'on')
    return __PREFER_ENGLISH_NAMES__
def place_name(lat, lon, db, cache=True, logger=logging.getLogger()):
    """Reverse-geocode coordinates into a place-name dictionary.

    Returns a dict with keys among 'city', 'town', 'village', 'state',
    'country' plus a 'default' key, falling back to
    {'default': __DEFAULT_LOCATION__} when nothing can be resolved.

    :param lat: latitude (anything coercible to float, or None)
    :param lon: longitude
    :param db: database providing get_location_name / add_location /
        update_location_db
    :param bool cache: consult the cached locations before the API
    :returns: dict
    """
    lookup_place_name_default = {'default': __DEFAULT_LOCATION__}
    if(lat is None or lon is None):
        return lookup_place_name_default

    # Convert lat/lon to floats
    if(not isinstance(lat, float)):
        lat = float(lat)
    if(not isinstance(lon, float)):
        lon = float(lon)

    # Try to get cached location first; 3 km distance radius for a match.
    cached_place_name = None
    if cache:
        cached_place_name = db.get_location_name(lat, lon, 3000)
    # We check that it's a dict to coerce an upgrade of the location
    # db from a string location to a dictionary. See gh-160.
    if(isinstance(cached_place_name, dict)):
        return cached_place_name

    lookup_place_name = {}
    geocoder = get_geocoder()
    if geocoder == 'Nominatim':
        geolocation_info = lookup_osm(lat, lon, logger)
    else:
        raise NameError(geocoder)

    if(geolocation_info is not None and 'address' in geolocation_info):
        address = geolocation_info['address']
        # gh-386 adds support for town
        # taking precedence after city for backwards compatability
        for loc in ['city', 'town', 'village', 'state', 'country']:
            if(loc in address):
                lookup_place_name[loc] = address[loc]
                # In many cases the desired key is not available so we
                # set the most specific as the default.
                if('default' not in lookup_place_name):
                    lookup_place_name['default'] = address[loc]

    if(lookup_place_name):
        db.add_location(lat, lon, lookup_place_name)
        # TODO: Maybe this should only be done on exit and not for every write.
        db.update_location_db()

    if('default' not in lookup_place_name):
        lookup_place_name = lookup_place_name_default

    return lookup_place_name
def lookup_osm(lat, lon, logger=logging.getLogger()):
    """Reverse-geocode coordinates against OSM/Nominatim.

    :returns: raw geopy response dict, or None when the lookup fails.
    """
    language = 'en' if get_prefer_english_names() else 'local'
    try:
        locator = Nominatim(user_agent='myGeocoder')
        return locator.reverse((lat, lon), language=language).raw
    except geopy.exc.GeocoderUnavailable as e:
        logger.error(e)
        return None
    # Fix *** TypeError: `address` must not be None
    except (TypeError, ValueError) as e:
        logger.error(e)
        return None

View File

View File

@ -1,36 +0,0 @@
"""
The audio module contains classes specifically for dealing with audio files.
The :class:`Audio` class inherits from the :class:`~dozo.media.Media`
class.
.. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
"""
import os
from .media import Media
class Audio(Media):
    """An audio object.

    :param str source: The fully qualified path to the audio file.
    :param set ignore_tags: Regex patterns of exif tags to ignore.
    """

    __name__ = 'Audio'

    #: Valid extensions for audio files.
    extensions = ('m4a',)

    def __init__(self, source=None, ignore_tags=None):
        # Bug fix: the caller's ignore_tags used to be discarded — a fresh
        # set() was always forwarded to Media. A None default also avoids
        # the shared-mutable-default pitfall.
        if ignore_tags is None:
            ignore_tags = set()
        super().__init__(source, ignore_tags=ignore_tags)

    def is_valid(self):
        """Check the file extension against valid file extensions.

        The list of valid file extensions come from self.extensions.

        :returns: bool
        """
        return os.path.splitext(self.source)[1][1:].lower() in self.extensions

View File

@ -1,349 +0,0 @@
"""
Base :class:`Media` class for media objects that are tracked by Dozo.
The Media class provides some base functionality used by all the media types.
Sub-classes (:class:`~dozo.media.Audio`, :class:`~dozo.media.Photo`, and :class:`~dozo.media.Video`).
"""
import mimetypes
import os
import six
import logging
# load modules
from dateutil.parser import parse
import re
from dozo.exiftool import ExifTool, ExifToolCaching
class Media():
    """The media class for all media objects.

    Provides the exif-metadata extraction and editing shared by the
    Audio, Photo and Video subclasses.

    NOTE(review): set_value() and get_exiftool_attributes() are called
    below but not defined in this class — presumably provided elsewhere;
    confirm before relying on them.

    :param str sources: The fully qualified path to the media file.
    :param set ignore_tags: Regex patterns of exif tags to ignore.
    :param logging.Logger logger: Logger used for error reporting.
    """

    __name__ = 'Media'

    # Maps a coordinate key to the key holding its hemisphere reference.
    d_coordinates = {
        'latitude': 'latitude_ref',
        'longitude': 'longitude_ref'
    }

    PHOTO = ('arw', 'cr2', 'dng', 'gif', 'heic', 'jpeg', 'jpg', 'nef', 'png', 'rw2')
    AUDIO = ('m4a',)
    VIDEO = ('avi', 'm4v', 'mov', 'mp4', 'mpg', 'mpeg', '3gp', 'mts')

    extensions = PHOTO + AUDIO + VIDEO

    def __init__(self, sources=None, ignore_tags=None, logger=logging.getLogger()):
        self.source = sources
        # None default avoids the shared-mutable-default pitfall of the
        # previous `ignore_tags=set()` signature; passing a set still works.
        self.ignore_tags = set() if ignore_tags is None else ignore_tags
        self.tags_keys = self.get_tags()
        self.exif_metadata = None
        self.metadata = None
        self.logger = logger

    def get_tags(self):
        """Build the mapping of metadata keys to candidate exif tags.

        Candidate tags are listed in priority order; tags matching any
        pattern in self.ignore_tags are filtered out.

        :returns: dict mapping key -> list of exif tag names
        """
        tags_keys = {}
        tags_keys['date_original'] = [
            'EXIF:DateTimeOriginal',
            'H264:DateTimeOriginal',
            'QuickTime:ContentCreateDate'
        ]
        tags_keys['date_created'] = [
            'EXIF:CreateDate',
            'QuickTime:CreationDate',
            'QuickTime:CreateDate',
            'QuickTime:CreationDate-und-US',
            'QuickTime:MediaCreateDate'
        ]
        tags_keys['date_modified'] = [
            'File:FileModifyDate',
            'QuickTime:ModifyDate'
        ]
        tags_keys['camera_make'] = ['EXIF:Make', 'QuickTime:Make']
        tags_keys['camera_model'] = ['EXIF:Model', 'QuickTime:Model']
        tags_keys['album'] = ['XMP-xmpDM:Album', 'XMP:Album']
        tags_keys['title'] = ['XMP:Title', 'XMP:DisplayName']
        tags_keys['latitude'] = [
            'EXIF:GPSLatitude',
            'XMP:GPSLatitude',
            # 'QuickTime:GPSLatitude',
            'Composite:GPSLatitude'
        ]
        tags_keys['longitude'] = [
            'EXIF:GPSLongitude',
            'XMP:GPSLongitude',
            # 'QuickTime:GPSLongitude',
            'Composite:GPSLongitude'
        ]
        tags_keys['latitude_ref'] = ['EXIF:GPSLatitudeRef']
        tags_keys['longitude_ref'] = ['EXIF:GPSLongitudeRef']
        tags_keys['original_name'] = ['XMP:OriginalFileName']

        # Remove ignored tags. Bug fix: the previous code deleted items
        # from a list while enumerating it, which skips the element that
        # follows each deletion; rebuilding the list avoids that.
        for tag_regex in self.ignore_tags:
            for key in tags_keys:
                tags_keys[key] = [
                    tag for tag in tags_keys[key]
                    if not re.match(tag_regex, tag)
                ]

        return tags_keys

    def _del_ignored_tags(self, exif_metadata):
        """Remove tags matching self.ignore_tags from exif_metadata in place."""
        for tag_regex in self.ignore_tags:
            ignored_tags = set()
            for tag in exif_metadata:
                if re.search(tag_regex, tag) is not None:
                    ignored_tags.add(tag)
            for ignored_tag in ignored_tags:
                del exif_metadata[ignored_tag]

    def get_mimetype(self):
        """Get the mimetype of the file.

        :returns: str or None
        """
        # guess_type returns a (type, encoding) tuple; the type element is
        # None when the extension is unknown. (The old `mimetype is None`
        # check was dead code — guess_type never returns None itself.)
        return mimetypes.guess_type(self.source)[0]

    def _get_key_values(self, key):
        """Yield every exif value available for a metadata key.

        Values come in the priority order of self.tags_keys[key]; nothing
        is yielded when exif metadata has not been loaded.
        """
        if self.exif_metadata is None:
            return
        for tag in self.tags_keys[key]:
            if tag in self.exif_metadata:
                yield self.exif_metadata[tag]

    def get_value(self, tag):
        """Get given value from EXIF.

        NOTE(review): relies on get_exiftool_attributes, which is not
        defined in this class — confirm it exists elsewhere.

        :returns: str or None
        """
        exiftool_attributes = self.get_exiftool_attributes()
        if exiftool_attributes is None:
            return None
        if tag not in exiftool_attributes:
            return None
        return exiftool_attributes[tag]

    def get_date_format(self, value):
        """Parse an exif date string into a datetime.

        :param value: raw exif date string or None
        :returns: datetime object or None
        """
        # EXIF DateTimeOriginal and EXIF DateTime are both stored
        # in %Y:%m:%d %H:%M:%S format.
        if value is None:
            return None
        try:
            # Rewrite the 'YYYY:MM:DD' prefix to 'YYYY-MM-DD' so dateutil
            # parses it unambiguously.
            regex = re.compile(r'(\d{4}):(\d{2}):(\d{2})')
            if re.match(regex, value) is not None:
                value = re.sub(regex, r'\g<1>-\g<2>-\g<3>', value)
            return parse(value)
        except (ValueError, OverflowError) as e:
            # Bug fix: the old clause `except BaseException or ...` just
            # evaluated to BaseException; catch dateutil's documented parse
            # errors instead (ParserError subclasses ValueError).
            self.logger.error(e)
            return None

    def get_coordinates(self, key, value):
        """Get latitude or longitude value

        :param str key: Type of coordinate to get. Either "latitude" or
            "longitude".
        :param value: raw exif coordinate value
        :returns: float or None
        """
        if value is None:
            return None
        if isinstance(value, str) and len(value) == 0:
            # If exiftool GPS output is empty, the data returned will be a str
            # with 0 length.
            # https://github.com/jmathai/elodie/issues/354
            return None

        # Cast coordinate to a float due to a bug in exiftool's
        # -json output format.
        # https://github.com/jmathai/elodie/issues/171
        # http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,7952.0.html # noqa
        this_coordinate = float(value)

        # Southern and western hemispheres are negative.
        direction_multiplier = 1.0
        if key == 'latitude':
            if self.exif_metadata.get('EXIF:GPSLatitudeRef') == 'S':
                direction_multiplier = -1.0
        elif key == 'longitude':
            if self.exif_metadata.get('EXIF:GPSLongitudeRef') == 'W':
                direction_multiplier = -1.0

        return this_coordinate * direction_multiplier

    def get_metadata(self):
        """Get a dictionary of metadata from exif.

        All keys will be present and have a value of None if not obtained.

        :returns: dict
        """
        # Get metadata from exiftool.
        self.exif_metadata = ExifToolCaching(self.source, logger=self.logger).asdict()

        # TODO to be removed
        self.metadata = {}
        if not self.exif_metadata:
            return self.metadata

        for key in self.tags_keys:
            formated_data = None
            for value in self._get_key_values(key):
                if 'date' in key:
                    formated_data = self.get_date_format(value)
                elif key in ('latitude', 'longitude'):
                    formated_data = self.get_coordinates(key, value)
                elif value is not None and value != '':
                    formated_data = value
                else:
                    formated_data = None
                if formated_data:
                    # The first non-empty candidate wins.
                    break
            self.metadata[key] = formated_data

        self.metadata['base_name'] = os.path.basename(os.path.splitext(self.source)[0])
        self.metadata['ext'] = os.path.splitext(self.source)[1][1:]
        self.metadata['directory_path'] = os.path.dirname(self.source)

        return self.metadata

    def has_exif_data(self):
        """Check if file has metadata, date original"""
        if not self.metadata:
            return False
        if 'date_original' in self.metadata:
            if self.metadata['date_original'] is not None:
                return True
        return False

    @classmethod
    def get_class_by_file(cls, _file, classes, ignore_tags=set(), logger=logging.getLogger()):
        """Static method to get a media object by file.

        :returns: instance of the matching class in *classes*, a generic
            Media for unknown extensions, or None for invalid/excluded files.
        """
        basestring = (bytes, str)
        if not isinstance(_file, basestring) or not os.path.isfile(_file):
            return None

        extension = os.path.splitext(_file)[1][1:].lower()
        if len(extension) > 0:
            for i in classes:
                if extension in i.extensions:
                    return i(_file, ignore_tags=ignore_tags)

        # Bug fix: exclude_list declared both entries but only '.DS_Store'
        # was actually checked; '.directory' is now excluded as intended.
        exclude_list = ['.DS_Store', '.directory']
        if os.path.basename(_file) in exclude_list:
            return None

        return Media(_file, ignore_tags=ignore_tags, logger=logger)

    def set_date_taken(self, date_key, time):
        """Set the date/time a photo was taken.

        :param date_key: unused here; kept for interface compatibility.
        :param datetime time: datetime object of when the photo was taken
        :returns: bool
        """
        if time is None:
            return False

        formatted_time = time.strftime('%Y:%m:%d %H:%M:%S')
        status = self.set_value('date_original', formatted_time)
        if not status:
            # exif attribute date_original doesn't exist; fall back.
            status = self.set_value('date_created', formatted_time)

        return status

    def set_coordinates(self, latitude, longitude):
        """Write GPS coordinates and their hemisphere refs to exif.

        :returns: bool — True only if every set_value call succeeded.
        """
        status = []
        if self.metadata['latitude_ref']:
            # Bug fix: the hemisphere must come from the sign *before*
            # taking the absolute value — the old code tested
            # `abs(latitude) > 0`, which could never pick 'S'.
            ref = 'N' if latitude >= 0 else 'S'
            latitude = abs(latitude)
            status.append(self.set_value('latitude_ref', ref))
        status.append(self.set_value('latitude', latitude))

        if self.metadata['longitude_ref']:
            ref = 'E' if longitude >= 0 else 'W'
            longitude = abs(longitude)
            # Bug fix: the eastern ref used to be written to 'latitude_ref'.
            status.append(self.set_value('longitude_ref', ref))
        status.append(self.set_value('longitude', longitude))

        return all(status)

    def set_album_from_folder(self, path):
        """Set the album attribute based on the leaf folder name

        :param path: unused; kept for interface compatibility.
        :returns: bool
        """
        folder = os.path.basename(os.path.dirname(self.source))

        # Bug fix: was calling an undefined module-level `set_value(self, ...)`;
        # call the method instead.
        return self.set_value('album', folder)
def get_all_subclasses(cls=None):
    """Module method to get all subclasses of Media.

    :param cls: class to start from; defaults to Media itself.
    :returns: set containing the start class and every descendant.
    """
    root = Media if cls is None else cls
    result = {root}
    for child in root.__subclasses__():
        result |= get_all_subclasses(child)
    return result
def get_media_class(_file, ignore_tags=None, logger=logging.getLogger()):
    """Return a media instance wrapping *_file*, or False when unsupported.

    :param str _file: path of the file to wrap.
    :param set ignore_tags: exif tag patterns to ignore.
    :returns: Media (sub)class instance, or False
    """
    if ignore_tags is None:
        ignore_tags = set()

    if not os.path.exists(_file):
        # A missing file is an error; the old duplicated warning+error
        # pair logged the same event twice.
        logger.error(f'Could not find {_file}')
        return False

    # Bug fix: forward the caller's ignore_tags instead of always passing
    # a fresh empty set.
    media = Media.get_class_by_file(
        _file, get_all_subclasses(), ignore_tags=ignore_tags, logger=logger
    )
    if not media:
        logger.error(f'File {_file} can\'t be imported')
        return False

    return media

View File

@ -1,158 +0,0 @@
"""
The photo module contains the :class:`Photo` class, which is used to track
image objects (JPG, DNG, etc.).
.. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
"""
import imagehash
import imghdr
import logging
import numpy as np
import os
from PIL import Image, UnidentifiedImageError
import time
from .media import Media
class Photo(Media):
    """A photo object.

    :param str source: The fully qualified path to the photo file
    :param int hash_size: Size of the perceptual hash used for comparisons.
    :param set ignore_tags: Regex patterns of exif tags to ignore.
    :param logging.Logger logger: Logger used for reporting.
    """

    __name__ = 'Photo'

    #: Valid extensions for photo files.
    extensions = ('arw', 'cr2', 'dng', 'gif', 'heic', 'jpeg', 'jpg', 'nef', 'png', 'rw2')

    def __init__(self, source=None, hash_size=8, ignore_tags=None,
                 logger=logging.getLogger()):
        if ignore_tags is None:
            ignore_tags = set()
        super().__init__(source, ignore_tags)
        self.hash_size = hash_size
        self.logger = logger
        # NOTE(review): mutating the shared logger's level from a library
        # class is questionable; kept for backward compatibility.
        logger.setLevel(logging.INFO)

        # HEIC extension support (experimental, not tested)
        self.pyheif = False
        try:
            from pyheif_pillow_opener import register_heif_opener
            self.pyheif = True
            # Allow to open HEIF/HEIC images from pillow
            register_heif_opener()
        except ImportError as e:
            self.logger.info(e)

    def is_image(self, img_path):
        """Check whether the file is an image.

        :returns: bool
        """
        # gh-4 This checks if the source file is an image.
        # It doesn't validate against the list of supported types.
        # We check with imghdr and pillow.
        if imghdr.what(img_path) is None:
            # Pillow is used as a fallback: imghdr won't detect all image
            # variants (https://bugs.python.org/issue28591); Pillow reads
            # only the header to determine the format, so this stays cheap.
            try:
                im = Image.open(img_path)
            except (IOError, UnidentifiedImageError):
                return False
            if im.format is None:
                return False

        return True

    def get_images(self, file_paths):
        """Yield the paths in *file_paths* that are images."""
        for img_path in file_paths:
            if self.is_image(img_path):
                yield img_path

    def get_images_hashes(self, file_paths):
        """Yield the average hash of every image in *file_paths*."""
        for img_path in self.get_images(file_paths):
            with Image.open(img_path) as img:
                yield imagehash.average_hash(img, self.hash_size)

    def find_duplicates(self, file_paths):
        """Find duplicate images by comparing perceptual hashes.

        Bug fix: the previous implementation referenced names that were
        never defined in its scope (hashes, duplicates, img_path) and
        could not run.

        :returns: list of duplicate file paths
        """
        hashes = {}
        duplicates = []
        for img_path in self.get_images(file_paths):
            with Image.open(img_path) as img:
                temp_hash = imagehash.average_hash(img, self.hash_size)
            if temp_hash in hashes:
                self.logger.info(
                    "Duplicate {} \nfound for image {}\n".format(
                        img_path, hashes[temp_hash]
                    )
                )
                duplicates.append(img_path)
            else:
                hashes[temp_hash] = img_path

        return duplicates

    def remove_duplicates(self, duplicates):
        """Delete the given files, logging (not raising) OS errors."""
        for duplicate in duplicates:
            try:
                os.remove(duplicate)
            except OSError as error:
                self.logger.error(error)

    def remove_duplicates_interactive(self, duplicates):
        """Ask for confirmation, then remove the given duplicates."""
        if len(duplicates) != 0:
            answer = input(f"Do you want to delete these {duplicates} images? Y/n: ")
            if answer.strip().lower() == 'y':
                self.remove_duplicates(duplicates)
                # Bug fix: referenced the undefined name 'duplicate'.
                self.logger.info(f'{duplicates} deleted successfully!')
        else:
            # Bug fix: this message was attached to the answer check, so it
            # fired when the user declined rather than when the list was empty.
            self.logger.info("No duplicates found")

    def get_hash(self, img_path):
        """Return the perceptual (average) hash matrix of an image."""
        with Image.open(img_path) as img:
            return imagehash.average_hash(img, self.hash_size).hash

    def diff(self, hash1, hash2):
        """Count the differing bits between two hash matrices."""
        return np.count_nonzero(hash1 != hash2)

    def similarity(self, img_diff):
        """Convert a bit-difference count to a similarity percentage."""
        threshold_img = img_diff / (self.hash_size**2)
        similarity_img = round((1 - threshold_img) * 100)
        return similarity_img

    def find_similar(self, image, file_paths, similarity=80):
        """Yield paths of images at least *similarity* percent similar to *image*.

        :returns: img_path generator
        """
        hash1 = ''
        if self.is_image(image):
            hash1 = self.get_hash(image)

        self.logger.info(f'Finding similar images to {image}')

        threshold = 1 - similarity / 100
        diff_limit = int(threshold * (self.hash_size**2))

        for img_path in self.get_images(file_paths):
            if img_path == image:
                continue
            hash2 = self.get_hash(img_path)
            img_diff = self.diff(hash1, hash2)
            if img_diff <= diff_limit:
                similarity_img = self.similarity(img_diff)
                self.logger.info(
                    f'{img_path} image found {similarity_img}% similar to {image}'
                )
                yield img_path

View File

@ -1,43 +0,0 @@
"""
The video module contains the :class:`Video` class, which represents video
objects (AVI, MOV, etc.).
.. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
"""
# load modules
from datetime import datetime
import os
import re
import time
from .media import Media
class Video(Media):
    """A video object.

    :param str source: The fully qualified path to the video file.
    :param set ignore_tags: Regex patterns of exif tags to ignore.
    """

    __name__ = 'Video'

    #: Valid extensions for video files.
    extensions = ('avi', 'm4v', 'mov', 'mp4', 'mpg', 'mpeg', '3gp', 'mts')

    def __init__(self, source=None, ignore_tags=None):
        # Bug fix: the caller's ignore_tags used to be discarded — a fresh
        # set() was always forwarded to Media. A None default also avoids
        # the shared-mutable-default pitfall.
        if ignore_tags is None:
            ignore_tags = set()
        super().__init__(source, ignore_tags=ignore_tags)
        # self.set_gps_ref = False

    def is_valid(self):
        """Check the file extension against valid file extensions.

        The list of valid file extensions come from self.extensions.

        :returns: bool
        """
        return os.path.splitext(self.source)[1][1:].lower() in self.extensions

View File

@ -1,40 +0,0 @@
from tabulate import tabulate
class Summary(object):
    """Accumulate per-file success/error results and print a report."""

    def __init__(self):
        self.records = []
        self.success = 0
        self.error = 0
        self.error_items = []

    def append(self, row):
        """Record one (identifier, status) result tuple."""
        item, status = row
        if not status:
            self.error += 1
            self.error_items.append(item)
        else:
            self.success += 1

    def write(self):
        """Print error details (if any), then the summary counts."""
        if self.error > 0:
            error_rows = [[item] for item in self.error_items]
            print('Errors details:')
            print(tabulate(error_rows, headers=["File"]))
            print("\n")

        counts = [
            ["Success", self.success],
            ["Error", self.error],
        ]
        print()
        print('Summary:')
        print(tabulate(counts, tablefmt="plain"))

View File

@ -1,33 +0,0 @@
# -*- mode: python -*-
# PyInstaller spec file that bundles elodie into a one-folder application.
# Analysis/PYZ/EXE/COLLECT are names injected by PyInstaller when it
# executes this spec; they are not regular Python imports.

block_cipher = None

a = Analysis(['elodie.py'],
             # NOTE(review): developer-specific search path — confirm it is
             # overridden or harmless on other build machines.
             pathex=['/Users/jaisenmathai/dev/tools/elodie'],
             binaries=None,
             # Ship the custom ExifTool configuration next to the binary.
             datas=[('configs/ExifTool_config', 'configs')],
             hiddenimports=[],
             hookspath=None,
             runtime_hooks=None,
             excludes=None,
             win_no_prefer_redirects=None,
             win_private_assemblies=None,
             cipher=block_cipher)
pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)
exe = EXE(pyz,
          a.scripts,
          exclude_binaries=True,
          name='elodie',
          debug=False,
          strip=None,
          upx=True,
          console=True )
coll = COLLECT(exe,
               a.binaries,
               a.zipfiles,
               a.datas,
               strip=None,
               upx=True,
               name='elodie')

35
ordigi.conf Normal file
View File

@ -0,0 +1,35 @@
[Exif]
#album_from_folder=False
fill_date_original=True
#cache=True
#ignore_tags=None
use_date_filename=True
#use_file_dates=False
[Filters]
exclude=["**/.directory", "**/.DS_Store"]
#extensions=None
#glob=**/*
#max_deep=None
remove_duplicates=True
[Geolocation]
geocoder=Nominatim
prefer_english_names=False
timeout=1
[Path]
# day_begins: what hour of the day you want the day to begin (only for
# classification purposes). Defaults at 0 as midnight. Can be
# used to group early morning photos with the previous day. Must
# be a number between 0-23
day_begins=4
# Path format
dirs_path=<%Y>/<%m-%b>_<location>_<folder>
name=<%Y%m%d-%H%M%S>_<<name>.%l<ext>|<original_name>>
# name=<%Y%m%d-%H%M%S>-%u<original_name>.%l<ext>
[Terminal]
dry_run=False
interactive=False

3
ordigi/__init__.py Normal file
View File

@ -0,0 +1,3 @@
from ordigi import log
LOG = log.get_logger('ordigi')

628
ordigi/cli.py Executable file
View File

@ -0,0 +1,628 @@
#!/usr/bin/env python
from pathlib import Path
import sys
import click
from ordigi import log, LOG
from ordigi.collection import Collection
from ordigi import constants
from ordigi.geolocation import GeoLocation
from ordigi import utils
_logger_options = [
click.option(
'--quiet',
'-q',
default=False,
is_flag=True,
help='Log level set to ERROR',
),
click.option(
'--verbose',
'-v',
default=False,
is_flag=True,
help='Log level set to INFO',
),
click.option(
'--debug',
'-d',
default=False,
is_flag=True,
help='Log level set to DEBUG',
),
]
_input_options = [
click.option(
'--interactive', '-i', default=False, is_flag=True, help="Interactive mode"
),
]
_dry_run_options = [
click.option(
'--dry-run',
default=False,
is_flag=True,
help='Dry run only, no change made to the filesystem.',
),
]
_exclude_options = [
click.option(
'--exclude',
'-E',
default=None,
multiple=True,
help='Directories or files to exclude.',
),
]
_filter_options = [
click.option(
'--ext',
'-e',
default=None,
multiple=True,
help="""Use filename
extension to filter files for sorting. If value is '*', use
common media file extension for filtering. Ignored files remain in
the same directory structure""",
),
click.option(
'--ignore-tags',
'-I',
default=None,
multiple=True,
help='Specific tags or group that will be ignored when\
searching for file data. Example \'File:FileModifyDate\' or \'Filename\'',
),
click.option('--glob', '-g', default='**/*', help='Glob file selection'),
]
_sort_options = [
click.option(
'--album-from-folder',
'-a',
default=False,
is_flag=True,
help="Use images' folders as their album names.",
),
click.option(
'--fill-date-original',
'-O',
default=False,
is_flag=True,
help="Fill date original from date media if not set",
),
click.option(
'--path-format',
'-p',
default=constants.DEFAULT_PATH_FORMAT,
help='Custom featured path format',
),
click.option(
'--remove-duplicates',
'-R',
default=False,
is_flag=True,
help='True to remove files that are exactly the same in name\
and a file hash',
),
click.option(
'--use-date-filename',
'-f',
default=False,
is_flag=True,
help="Use filename date for media original date.",
),
click.option(
'--use-file-dates',
'-F',
default=False,
is_flag=True,
help="Use file date created or modified for media original date.",
),
]
def print_help(command):
    """Echo the click help text for *command* to stdout."""
    ctx = click.Context(command)
    click.echo(command.get_help(ctx))
def add_options(options):
    """Build a decorator that applies every click option in *options*.

    Options are applied in reverse so they show up in declared order in
    the generated --help output.
    """
    def decorate(func):
        wrapped = func
        for option in reversed(options):
            wrapped = option(wrapped)
        return wrapped
    return decorate
def _get_paths(paths, root):
root = Path(root).expanduser().absolute()
if not paths:
absolute_paths = {root}
else:
absolute_paths = set()
for path in paths:
absolute_paths.add(Path(path).expanduser().absolute())
return absolute_paths, root
def _cli_get_location(collection):
    """Build a GeoLocation from the collection's [Geolocation] options."""
    opts = collection.opt['Geolocation']
    return GeoLocation(
        opts['geocoder'], opts['prefer_english_names'], opts['timeout']
    )
def _cli_sort(collection, src_paths, import_mode):
    """Sort *src_paths* into *collection*, geolocating as configured."""
    location = _cli_get_location(collection)
    return collection.sort_files(src_paths, location, import_mode)
@click.group()
def cli(**kwargs):
    """Top-level click command group; subcommands attach via @cli.command."""
    pass
@cli.command('check')
@add_options(_logger_options)
@click.argument('path', required=True, nargs=1, type=click.Path())
def _check(**kwargs):
    """
    Check media collection.
    """
    root = Path(kwargs['path']).expanduser().absolute()

    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)

    collection = Collection(root)
    result = collection.check_db()
    if result:
        summary = collection.check_files()
        # Reports are printed only below WARNING level (i.e. -v/-d).
        if log_level < 30:
            summary.print()
        if summary.errors:
            LOG.error('Db data is not accurate run `ordigi update --checksum`')
            sys.exit(1)
    else:
        LOG.error('Db data is not accurate run `ordigi update`')
        sys.exit(1)
@cli.command('clean')
@add_options(_logger_options)
@add_options(_dry_run_options)
@add_options(_filter_options)
@click.option(
    '--dedup-regex',
    '-D',
    default=None,
    multiple=True,
    help='Regex to match duplicate strings parts',
)
@click.option(
    '--delete-excluded', '-d', default=False, is_flag=True, help='Remove excluded files'
)
@click.option(
    '--folders', '-f', default=False, is_flag=True, help='Remove empty folders'
)
@click.option(
    '--path-string', '-p', default=False, is_flag=True, help='Deduplicate path string'
)
@click.option(
    '--remove-duplicates',
    '-R',
    default=False,
    is_flag=True,
    help='True to remove files that are exactly the same in name and a file hash',
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('collection', required=True, nargs=1, type=click.Path())
def _clean(**kwargs):
    """Clean media collection"""
    folders = kwargs['folders']

    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)

    subdirs = kwargs['subdirs']
    root = kwargs['collection']
    paths, root = _get_paths(subdirs, root)

    collection = Collection(
        root,
        {
            'dry_run': kwargs['dry_run'],
            'extensions': kwargs['ext'],
            'glob': kwargs['glob'],
            'remove_duplicates': kwargs['remove_duplicates'],
        },
    )

    # os.path.join(
    # TODO make function to remove duplicates
    # path_format = collection.opt['Path']['path_format']
    # summary = collection.sort_files(paths, None)

    if kwargs['path_string']:
        dedup_regex = set(kwargs['dedup_regex'])
        collection.dedup_path(paths, dedup_regex)

    for path in paths:
        if folders:
            collection.remove_empty_folders(path)

    if kwargs['delete_excluded']:
        collection.remove_excluded_files()

    summary = collection.summary

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('clone')
@add_options(_logger_options)
@add_options(_dry_run_options)
@click.argument('src', required=True, nargs=1, type=click.Path())
@click.argument('dest', required=True, nargs=1, type=click.Path())
def _clone(**kwargs):
    """Clone media collection to another location"""
    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)

    src_path = Path(kwargs['src']).expanduser().absolute()
    dest_path = Path(kwargs['dest']).expanduser().absolute()
    dry_run = kwargs['dry_run']

    src_collection = Collection(
        src_path, {'cache': True, 'dry_run': dry_run}
    )

    # Refuse to clone into a non-empty destination.
    if dest_path.exists() and not utils.empty_dir(dest_path):
        LOG.error(f'Destination collection path {dest_path} must be empty directory')
        sys.exit(1)

    summary = src_collection.clone(dest_path)

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('compare')
@add_options(_logger_options)
@add_options(_dry_run_options)
@add_options(_filter_options)
@click.option('--find-duplicates', '-f', default=False, is_flag=True)
@click.option('--remove-duplicates', '-r', default=False, is_flag=True)
@click.option(
    '--similar-to',
    '-s',
    default=False,
    help='Similar to given image',
)
@click.option(
    '--similarity',
    '-S',
    default=80,
    help='Similarity level for images',
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('collection', required=True, nargs=1, type=click.Path())
def _compare(**kwargs):
    """
    Sort similar images in directories
    """
    # NOTE(review): the --find-duplicates and --similar-to options are
    # declared but never read in this body — confirm whether they should
    # feed sort_similar_images.
    subdirs = kwargs['subdirs']
    root = kwargs['collection']

    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)
    paths, root = _get_paths(subdirs, root)

    collection = Collection(
        root,
        {
            'extensions': kwargs['ext'],
            'glob': kwargs['glob'],
            'dry_run': kwargs['dry_run'],
            'remove_duplicates': kwargs['remove_duplicates'],
        },
    )

    for path in paths:
        collection.sort_similar_images(path, kwargs['similarity'])

    summary = collection.summary

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('edit')
@add_options(_logger_options)
@add_options(_exclude_options)
@add_options(_filter_options)
@click.option(
    '--key',
    '-k',
    default=None,
    multiple=True,
    help="Select exif tags groups to edit",
)
@click.option(
    '--overwrite',
    '-O',
    default=False,
    is_flag=True,
    help="Overwrite db and exif value by key value",
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('path', required=True, nargs=1, type=click.Path())
def _edit(**kwargs):
    """Edit EXIF metadata in files or directories"""
    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)
    paths, root = _get_paths(kwargs['subdirs'], kwargs['path'])

    overwrite = kwargs['overwrite']

    collection = Collection(
        root,
        {
            'cache': True,
            'ignore_tags': kwargs['ignore_tags'],
            'exclude': kwargs['exclude'],
            'extensions': kwargs['ext'],
            'glob': kwargs['glob'],
        }
    )

    # Whitelist of metadata keys the command may modify.
    editable_keys = (
        'album',
        'camera_make',
        'camera_model',
        'city',
        'country',
        # 'date_created',
        'date_media',
        # 'date_modified',
        'date_original',
        'latitude',
        'location',
        'longitude',
        'latitude_ref',
        'longitude_ref',
        'original_name',
        'state',
        'title',
    )

    # No --key given means edit every editable key.
    if not kwargs['key']:
        keys = set(editable_keys)
    else:
        keys = set(kwargs['key'])
        # 'coordinates' is shorthand for both latitude and longitude.
        if 'coordinates' in keys:
            keys.remove('coordinates')
            keys.update(['latitude', 'longitude'])

    location = False
    for key in keys:
        if key not in editable_keys:
            LOG.error(f"key '{key}' is not valid")
            sys.exit(1)

        # Any geo-related key requires a geocoder instance.
        if key in (
            'city',
            'latitude',
            'location',
            'longitude',
            'latitude_ref',
            'longitude_ref',
        ):
            location = True

    if location:
        loc = _cli_get_location(collection)
    else:
        loc = None

    summary = collection.edit_metadata(paths, keys, loc, overwrite)

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('init')
@add_options(_logger_options)
@click.argument('path', required=True, nargs=1, type=click.Path())
def _init(**kwargs):
    """
    Init media collection database.
    """
    root = Path(kwargs['path']).expanduser().absolute()

    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)

    collection = Collection(root)
    loc = _cli_get_location(collection)
    summary = collection.init(loc)

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('import')
@add_options(_logger_options)
@add_options(_input_options)
@add_options(_dry_run_options)
@add_options(_exclude_options)
@add_options(_filter_options)
@add_options(_sort_options)
@click.option(
    '--copy',
    '-c',
    default=False,
    is_flag=True,
    help='True if you want files to be copied over from src_dir to\
    dest_dir rather than moved',
)
@click.argument('src', required=False, nargs=-1, type=click.Path())
@click.argument('dest', required=True, nargs=1, type=click.Path())
def _import(**kwargs):
    """Sort files or directories by reading their EXIF and organizing them
    according to ordigi.conf preferences.
    """
    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)
    src_paths, root = _get_paths(kwargs['src'], kwargs['dest'])

    # cache is off for imports: sources are outside the collection db.
    collection = Collection(
        root,
        {
            'album_from_folder': kwargs['album_from_folder'],
            'cache': False,
            'ignore_tags': kwargs['ignore_tags'],
            'use_date_filename': kwargs['use_date_filename'],
            'use_file_dates': kwargs['use_file_dates'],
            'exclude': kwargs['exclude'],
            'extensions': kwargs['ext'],
            'glob': kwargs['glob'],
            'dry_run': kwargs['dry_run'],
            'interactive': kwargs['interactive'],
            'path_format': kwargs['path_format'],
            'remove_duplicates': kwargs['remove_duplicates'],
        }
    )

    if kwargs['copy']:
        import_mode = 'copy'
    else:
        import_mode = 'move'
    summary = _cli_sort(collection, src_paths, import_mode)

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('sort')
@add_options(_logger_options)
@add_options(_input_options)
@add_options(_dry_run_options)
@add_options(_filter_options)
@add_options(_sort_options)
@click.option('--clean', '-C', default=False, is_flag=True, help='Clean empty folders')
@click.option(
    '--reset-cache',
    '-r',
    default=False,
    is_flag=True,
    help='Regenerate the hash.json and location.json database ',
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('dest', required=True, nargs=1, type=click.Path())
def _sort(**kwargs):
    """Sort files or directories by reading their EXIF and organizing them
    according to ordigi.conf preferences.
    """
    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)
    paths, root = _get_paths(kwargs['subdirs'], kwargs['dest'])

    # --reset-cache forces a db regeneration by disabling the cache.
    cache = not kwargs['reset_cache']

    collection = Collection(
        root,
        {
            'album_from_folder': kwargs['album_from_folder'],
            'cache': cache,
            'fill_date_original': kwargs['fill_date_original'],
            'ignore_tags': kwargs['ignore_tags'],
            'use_date_filename': kwargs['use_date_filename'],
            'use_file_dates': kwargs['use_file_dates'],
            'extensions': kwargs['ext'],
            'glob': kwargs['glob'],
            'dry_run': kwargs['dry_run'],
            'interactive': kwargs['interactive'],
            'remove_duplicates': kwargs['remove_duplicates'],
        }
    )

    # import_mode=False: sorting happens inside the collection.
    summary = _cli_sort(collection, paths, False)

    if kwargs['clean']:
        collection.remove_empty_folders(root)

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    if summary.errors:
        sys.exit(1)
@cli.command('update')
@add_options(_logger_options)
@click.option(
    '--checksum',
    '-c',
    default=False,
    is_flag=True,
    help='Update checksum, assuming file are changed by the user',
)
@click.argument('path', required=True, nargs=1, type=click.Path())
def _update(**kwargs):
    """
    Update media collection database.
    """
    root = Path(kwargs['path']).expanduser().absolute()

    log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
    log.console(LOG, level=log_level)

    collection = Collection(root)
    loc = _cli_get_location(collection)
    summary = collection.update(loc, kwargs['checksum'])

    # Reports are printed only below WARNING level (i.e. -v/-d).
    if log_level < 30:
        summary.print()

    # Consistency fix: every other command exits non-zero when the
    # operation reported errors; this one silently succeeded.
    if summary.errors:
        sys.exit(1)
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    cli()

1269
ordigi/collection.py Normal file

File diff suppressed because it is too large Load Diff

196
ordigi/config.py Normal file
View File

@ -0,0 +1,196 @@
import json
import re
from configparser import RawConfigParser
from ordigi import constants
from geopy.geocoders import options as gopt
def check_option(getoption):
    """Wrap an int/boolean config getter so invalid values yield None.

    BUGFIX: the original put try/except around a bare reference to the
    function object, which can never raise, so a malformed config value
    propagated a ValueError to the caller.  Wrap the *call* instead.

    :param getoption: callable performing the typed config lookup
    :returns: wrapper returning the getter's value, or None on ValueError
    """
    def wrapper(*args, **kwargs):
        try:
            return getoption(*args, **kwargs)
        except ValueError:
            # TODO: log the offending option value
            return None

    return wrapper
def check_json(getoption):
    """Wrap a JSON config getter so invalid JSON yields None.

    BUGFIX: the original put try/except around a bare reference to the
    function object, which can never raise, so a JSONDecodeError
    propagated to the caller.  Wrap the *call* instead.

    :param getoption: callable that decodes a JSON config value
    :returns: wrapper returning the decoded value, or None on decode error
    """
    def wrapper(*args, **kwargs):
        try:
            return getoption(*args, **kwargs)
        except json.JSONDecodeError:
            # TODO: log the offending option value
            return None

    return wrapper
def check_re(getoption):
    """Wrap a regex config getter so an invalid pattern yields None.

    BUGFIX: the original put try/except around a bare reference to the
    function object, which can never raise, so re.error propagated to
    the caller.  Wrap the *call* instead.

    :param getoption: callable that compiles a regex config value
    :returns: wrapper returning the compiled pattern, or None on re.error
    """
    def wrapper(*args, **kwargs):
        try:
            return getoption(*args, **kwargs)
        except re.error:
            # TODO: log the offending pattern
            return None

    return wrapper
class Config:
    """Manage the ordigi configuration file.

    Reads an INI-style file with RawConfigParser and exposes typed access
    to options, falling back to built-in defaults for anything missing.
    """

    def __init__(self, conf_path=constants.CONFIG_FILE, conf=None):
        """
        :param conf_path: path to the config file (pathlib.Path)
        :param conf: pre-parsed config (mapping/ConfigParser); when given,
            no file is read
        """
        self.conf_path = conf_path
        if conf is None:
            self.conf = self.load_config()
            if self.conf == {}:
                # Fallback to default config
                self.conf_path = constants.CONFIG_FILE
                self.conf = self.load_config()
        else:
            self.conf = conf

        self.options = self.get_default_options()

    def get_default_options(self) -> dict:
        """Return the built-in default value of every option, per section."""
        return {
            'Exif': {
                'album_from_folder': False,
                'fill_date_original': False,
                'cache': True,
                'ignore_tags': None,
                'use_date_filename': False,
                'use_file_dates': False,
            },
            'Filters': {
                'exclude': set(),
                'extensions': None,
                'glob': '**/*',
                'max_deep': None,
                'remove_duplicates': False,
            },
            'Geolocation': {
                'geocoder': constants.DEFAULT_GEOCODER,
                'prefer_english_names': False,
                'timeout': gopt.default_timeout,
            },
            'Path': {
                'day_begins': 0,
                'path_format': constants.DEFAULT_PATH_FORMAT,
            },
            'Terminal': {
                'dry_run': False,
                'interactive': False,
            },
        }

    def write(self, conf):
        """Write the ConfigParser *conf* to self.conf_path.

        :returns: True on success (I/O errors propagate as OSError)
        """
        # BUGFIX: removed an unreachable `return False` that followed the
        # `return True` below.
        with open(self.conf_path, 'w') as conf_file:
            conf.write(conf_file)
        return True

    def load_config(self):
        """Parse the config file; return {} when it does not exist."""
        if not self.conf_path.exists():
            return {}

        conf = RawConfigParser()
        conf.read(self.conf_path)
        return conf

    def is_option(self, section, option):
        """Return True when *option* is present in *section* of the file."""
        if section in self.conf and option in self.conf[section]:
            return True
        return False

    @check_option
    def _getboolean(self, section, option):
        return self.conf.getboolean(section, option)

    getboolean = check_option(_getboolean)

    @check_option
    def _getint(self, section, option):
        return self.conf.getint(section, option)

    getint = check_option(_getint)

    @check_json
    def _getjson(self, section, option):
        return json.loads(self.conf.get(section, option))

    getjson = check_json(_getjson)

    @check_re
    def _getre(self, section, option):
        return re.compile(self.conf.get(section, option))

    getre = check_re(_getre)

    def get_config_option(self, section, option):
        """Return the effective value of one option.

        File values take precedence over defaults and are parsed according
        to the option's kind (bool, int, json list, regex...).
        """
        bool_options = {
            'album_from_folder',
            'fill_date_original',
            'cache',
            'dry_run',
            'interactive',
            'prefer_english_names',
            'remove_duplicates',
            'use_date_filename',
            'use_file_dates',
        }
        int_options = {
            'day_begins',
            'max_deep',
            'timeout',
        }
        string_options = {
            'glob',
            'geocoder',
        }
        multi_options = {
            'exclude',
            'extensions',
            'ignore_tags',
        }

        # Default value used when the option is absent or unparseable.
        value = self.options[section][option]
        if self.is_option(section, option):
            if option in bool_options:
                return self.getboolean(section, option)
            if option in int_options:
                return self.getint(section, option)
            if option == 'geocoder' and value in ('Nominatim',):
                return self.conf[section][option]
            if option == 'glob':
                # NOTE(review): this returns a compiled regex while the
                # default is the glob string '**/*' -- confirm callers
                # accept both forms.
                return self.getre(section, option)
            if option == 'path_format':
                return self.conf[section][option]
            if option in multi_options:
                return set(self.getjson(section, option))

            return value

        if option == 'path_format':
            if self.is_option('Path', 'name') and self.is_option('Path', 'dirs_path'):
                # Path format is split in two parts
                value = self.conf['Path']['dirs_path'] + '/' + self.conf['Path']['name']

        return value

    def get_config_options(self) -> dict:
        """Resolve every option and return the fully-populated mapping."""
        for section in self.options:
            for option in self.options[section]:
                # Option is in section
                value = self.get_config_option(section, option)
                self.options[section][option] = value

        return self.options

31
ordigi/constants.py Normal file
View File

@ -0,0 +1,31 @@
"""
Settings.
"""
from os import environ
from pathlib import Path
#: If True, debug messages will be printed.
debug = False
# Ordigi settings directory.
def get_config_dir(name):
    """Return the per-user configuration directory joined with *name*.

    Precedence: $XDG_CONFIG_HOME, then %APPDATA%, then $HOME/.config.
    """
    for env_var in ('XDG_CONFIG_HOME', 'APPDATA'):
        if env_var in environ:
            return Path(environ[env_var]) / name
    return Path(environ['HOME'], '.config') / name
# Per-user directory holding ordigi settings.
APPLICATION_DIRECTORY = get_config_dir('ordigi')
# Default folder layout: year-month folder, then album or city.
DEFAULT_PATH = '<%Y-%m-%b>/<album>|<city>'
# Default file name: timestamp, original name, title, lowercased extension.
DEFAULT_NAME = '<%Y-%m-%d_%H-%M-%S>-<name>-<title>.%l<ext>'
DEFAULT_PATH_FORMAT = DEFAULT_PATH + '/' + DEFAULT_NAME
DEFAULT_GEOCODER = 'Nominatim'
# Main configuration file location.
CONFIG_FILE = APPLICATION_DIRECTORY / 'ordigi.conf'

350
ordigi/database.py Normal file
View File

@ -0,0 +1,350 @@
from datetime import datetime
import os
from pathlib import Path
import sqlite3
import sys
from ordigi import LOG
from ordigi.utils import distance_between_two_points
class Sqlite:
    """Thin wrapper around the sqlite3 collection database.

    Owns two tables: ``metadata`` (one row per file, unique on FilePath)
    and ``location`` (deduplicated GPS lookups, referenced by
    ``metadata.LocationId``).
    """

    def __init__(self, target_dir):
        """Open (creating if needed) ``<target_dir>/.ordigi/collection.db``."""
        # Create dir for target database
        db_dir = Path(target_dir, '.ordigi')

        if not db_dir.exists():
            try:
                db_dir.mkdir()
            except OSError:
                # Best effort: sqlite3.connect below fails loudly if the
                # directory really is unusable.
                pass

        self.db_type = 'SQLite format 3'
        self.log = LOG.getChild(self.__class__.__name__)
        # sql column type -> python types it stores (used by get_header)
        self.types = {'text': (str, datetime), 'integer': (int,), 'real': (float,)}

        self.filename = Path(db_dir, 'collection.db')
        self.con = sqlite3.connect(self.filename)
        # Allow selecting column by name
        self.con.row_factory = sqlite3.Row
        self.cur = self.con.cursor()

        metadata_header = {
            'FilePath': 'text not null',
            'Checksum': 'text',
            'Album': 'text',
            'Title': 'text',
            'LocationId': 'integer',
            'DateMedia': 'text',
            'DateOriginal': 'text',
            'DateCreated': 'text',
            'DateModified': 'text',
            'FileModifyDate': 'text',
            'CameraMake': 'text',
            'CameraModel': 'text',
            'OriginalName': 'text',
            'SrcDir': 'text',
            'Subdirs': 'text',
            'Filename': 'text',
        }

        location_header = {
            'Latitude': 'real not null',
            'Longitude': 'real not null',
            'LatitudeRef': 'text',
            'LongitudeRef': 'text',
            'City': 'text',
            'State': 'text',
            'Country': 'text',
            'Location': 'text',
        }

        self.tables = {
            'metadata': {'header': metadata_header},
            'location': {'header': location_header},
        }

        # Create tables
        for table, d in self.tables.items():
            if not self.is_table(table):
                if table == 'metadata':
                    # https://www.quackit.com/sqlite/tutorial/create_a_relationship.cfm
                    self.create_table(
                        table, d['header'],
                        (
                            "unique('FilePath')",
                            "foreign key(LocationId) references location(Id)",
                        ),
                    )
                elif table == 'location':
                    self.create_table(
                        table, d['header'],
                        ("unique('Latitude', 'Longitude')",),
                    )

    def is_Sqlite3(self, filename):
        """Return True when *filename* is an SQLite 3 database file."""
        if not os.path.isfile(filename):
            return False
        if os.path.getsize(filename) < 100:  # SQLite database file header is 100 bytes
            return False

        with open(filename, 'rb') as fd:
            header = fd.read(100)

        # BUGFIX: the file is read in binary mode, so the magic string must
        # be encoded; comparing bytes to str is always False in Python 3.
        return header[:16] == (self.db_type + '\x00').encode('ascii')

    def is_table(self, table):
        """Check if table exist"""
        try:
            # get the count of tables with the name
            self.cur.execute(
                f"select count(name) from sqlite_master where type='table' and name='{table}'"
            )
        except sqlite3.DatabaseError as e:
            # chain the original error for easier debugging
            raise sqlite3.DatabaseError(f"{self.filename} is not valid database") from e

        # if the count is 1, then table exists
        if self.cur.fetchone()[0] == 1:
            return True

        return False

    def get_rows(self, table):
        """Cycle through rows in table
        :params: str
        :return: iter
        """
        self.cur.execute(f'select * from {table}')
        for row in self.cur:
            yield row

    def is_empty(self, table):
        """Return True when *table* contains no rows."""
        # Fetch at most one row instead of materializing the whole table.
        return next(self.get_rows(table), None) is None

    def _run(self, query, n=0, params=()):
        """Execute *query* (with optional bound *params*) and return
        column *n* of the first result row, or False on error/no match.
        """
        self.log.debug(f"Sqlite run '{query}'")

        try:
            result = self.cur.execute(query, params).fetchone()
        except sqlite3.DatabaseError as e:
            self.log.error(e)
            result = False

        if result:
            return result[n]

        return False

    def _run_many(self, query, table_list):
        """Run *query* once per parameter tuple in *table_list*.

        :returns: True on success, False on database error
        """
        # BUGFIX: the original subscripted cursor.fetchone() after a bulk
        # DML statement; fetchone() returns None there, so every call
        # raised TypeError.  Report success via exception handling instead.
        try:
            self.cur.executemany(query, table_list)
        except sqlite3.DatabaseError as e:
            self.log.error(e)
            return False

        self.con.commit()
        return True

    def create_table(self, table, header, statements=None):
        """Create *table* with an autoincrement Id plus *header* columns.

        :params: table name (str), header (dict), extra statements (tuple)
        :returns: bool
        """
        fieldset = []
        fieldset.append("Id integer primary key autoincrement")
        for col, definition in header.items():
            fieldset.append(f"{col} {definition}")
        # https://stackoverflow.com/questions/11719073/sqlite-insert-or-update-without-changing-rowid-value
        if statements:
            for statement in statements:
                fieldset.append(statement)

        if len(fieldset) > 0:
            query = "create table {0} ({1})".format(table, ", ".join(fieldset))
            self.cur.execute(query)
            self.tables[table]['header'] = header
            return True

        return False

    def check_row(self, table, row_data):
        """Validate *row_data* against the table header.

        :returns: (columns, placeholders) strings for an SQL statement
        :raises ValueError: when the column count does not match the header
        """
        header = self.tables[table]['header']
        if len(row_data) != len(header):
            raise ValueError(
                f"""Table {table} length mismatch: row_data
                {row_data}, header {header}"""
            )

        columns = ', '.join(row_data.keys())
        placeholders = ', '.join('?' * len(row_data))

        return columns, placeholders

    def update_query(self, table, row_id, columns, placeholders):
        """Build a replace statement keeping the existing row Id.

        :returns: query (str)
        """
        return f"""replace into {table} (Id, {columns})
        values ((select id from {table} where id={row_id}), {placeholders})"""

    def insert_query(self, table, columns, placeholders):
        """Build a plain insert statement.

        :returns: query (str)
        """
        return f"insert into {table} ({columns}) values ({placeholders})"

    def upsert_row(self, table, row_data, columns, placeholders, row_id=None):
        """Insert *row_data*, or replace the row identified by *row_id*.

        :returns: lastrowid (int)
        https://www.sqlitetutorial.net/sqlite-replace-statement/
        https://www.sqlite.org/lang_UPSERT.html
        """
        if row_id:
            query = self.update_query(table, row_id, columns, placeholders)
        else:
            query = self.insert_query(table, columns, placeholders)

        values = []
        for key, value in row_data.items():
            if isinstance(value, bool):
                # sqlite has no boolean type; store 0/1
                values.append(int(value))
            else:
                values.append(value)

        self.cur.execute(query, values)
        self.con.commit()

        return self.cur.lastrowid

    def upsert_location(self, row_data):
        """Insert or update one location row keyed by (Latitude, Longitude)."""
        # Check if row already exist
        row_id = self.get_location(row_data['Latitude'], row_data['Longitude'], 'Id')
        columns, placeholders = self.check_row('location', row_data)

        return self.upsert_row('location', row_data, columns, placeholders, row_id)

    def upsert_metadata(self, row_data):
        """Insert or update one metadata row keyed by FilePath."""
        # Check if row already exist
        row_id = self.get_metadata(row_data['FilePath'], 'Id')
        columns, placeholders = self.check_row('metadata', row_data)

        return self.upsert_row('metadata', row_data, columns, placeholders, row_id)

    def get_header(self, row_data):
        """Derive an sql header from the python types in *row_data*.

        Keys whose value type has no sql mapping are silently dropped.
        :params: row data (dict)
        :returns: header
        """
        sql_table = {}
        for key, value in row_data.items():
            for sql_type, t in self.types.items():
                # Find corresponding sql_type from python type
                if type(value) in t:
                    sql_table[key] = sql_type

        return sql_table

    def build_table(self, table, row_data, statements=None):
        """Create *table* using a header inferred from *row_data*."""
        header = self.get_header(row_data)
        # BUGFIX: the original hard-coded statements=None here, dropping the
        # caller's constraints (unique/foreign key clauses).
        return self.create_table(table, header, statements=statements)

    def check_table(self, table, row_data):
        """Return True when *table* has a known header.

        :params: row data (dict), primary_key (tuple)
        :returns: bool
        """
        if not self.tables[table]['header']:
            self.log.error(f"Table {table} do not exist")
            return False

        return True

    def escape_quote(self, string):
        """Double single quotes for embedding in SQL literals.

        Retained for backward compatibility; lookups below now use
        parameter binding instead of string interpolation.
        """
        return string.translate(str.maketrans({"'": r"''"}))

    def get_checksum(self, file_path):
        """Return the stored checksum for *file_path*, or False."""
        query = "select Checksum from metadata where FilePath=?"
        return self._run(query, params=(str(file_path),))

    def get_metadata(self, file_path, column):
        """Return *column* of the metadata row for *file_path*, or False."""
        query = f"select {column} from metadata where FilePath=?"
        return self._run(query, params=(str(file_path),))

    def match_location(self, latitude, longitude):
        """Return 1 when a location row exists for the exact coordinates."""
        query = "select 1 from location where Latitude=? and Longitude=?"
        return self._run(query, params=(latitude, longitude))

    def get_location_data(self, location_id, data):
        """Return column *data* of the location row *location_id*."""
        query = f"select {data} from location where Id=?"
        return self._run(query, params=(location_id,))

    def get_location(self, latitude, longitude, column):
        """Return *column* of the location row matching the coordinates."""
        query = f"select {column} from location where Latitude=? and Longitude=?"
        return self._run(query, params=(latitude, longitude))

    def _get_table(self, table):
        """Return every row of *table*."""
        # NOTE(review): the original discarded the fetchall() result;
        # returning it makes the method usable.
        return self.cur.execute(f'SELECT * FROM {table}').fetchall()

    def get_location_nearby(self, latitude, longitude, Column, threshold_m=3000):
        """
        Find a name for a location in the database.

        :param float latitude: Latitude of the location.
        :param float longitude: Longitude of the location.
        :param int threshold_m: Location in the database must be this close to
            the given latitude and longitude.
        :returns: str, or None if a matching location couldn't be found.
        """
        shorter_distance = sys.maxsize
        value = None
        self.cur.execute('SELECT * FROM location')
        for row in self.cur:
            distance = distance_between_two_points(
                latitude, longitude, row['Latitude'], row['Longitude']
            )
            # Use if closer then threshold_km reuse lookup
            if distance < shorter_distance and distance <= threshold_m:
                shorter_distance = distance
                value = row[Column]

        return value

    def delete_row(self, table, column, value):
        """
        Delete the rows of *table* where *column* equals *value*.

        :param table: database table
        :param column: column to match
        :param value: value to match
        """
        sql = f'delete from {table} where {column}=?'
        self.cur.execute(sql, (value,))
        self.con.commit()

    def delete_filepath(self, value):
        """Delete the metadata row of one file path."""
        self.delete_row('metadata', 'FilePath', value)

    def delete_all_rows(self, table):
        """
        Delete all row in table

        :param table: database table
        """
        sql = f'delete from {table}'
        self.cur.execute(sql)
        self.con.commit()

    def len(self, table):
        """Return the number of rows in *table*."""
        sql = f'select count() from {table}'
        return self._run(sql)

View File

@ -4,14 +4,16 @@ https://github.com/RhetTbull/osxphotos/blob/master/osxphotos/exiftool.py
import atexit
import json
import logging
import os
from pathlib import Path
import re
import shutil
import subprocess
from abc import ABC, abstractmethod
from functools import lru_cache # pylint: disable=syntax-error
from ordigi import LOG
# exiftool -stay_open commands outputs this EOF marker after command is run
EXIFTOOL_STAYOPEN_EOF = "{ready}"
EXIFTOOL_STAYOPEN_EOF_LEN = len(EXIFTOOL_STAYOPEN_EOF)
@ -28,14 +30,14 @@ def exiftool_is_running():
@atexit.register
def terminate_exiftool():
"""Terminate any running ExifTool subprocesses; call this to cleanup when done using ExifTool """
"""Terminate any running ExifTool subprocesses; call this to cleanup when done using ExifTool"""
for proc in EXIFTOOL_PROCESSES:
proc._stop_proc()
@lru_cache(maxsize=1)
def get_exiftool_path():
""" return path of exiftool, cache result """
"""return path of exiftool, cache result"""
exiftool_path = shutil.which("exiftool")
if exiftool_path:
return exiftool_path.rstrip()
@ -51,33 +53,33 @@ class _ExifToolProc:
Creates a singleton object"""
def __new__(cls, *args, **kwargs):
""" create new object or return instance of already created singleton """
"""create new object or return instance of already created singleton"""
if not hasattr(cls, "instance") or not cls.instance:
cls.instance = super().__new__(cls)
return cls.instance
def __init__(self, exiftool=None, logger=logging.getLogger()):
def __init__(self, exiftool=None):
"""construct _ExifToolProc singleton object or return instance of already created object
exiftool: optional path to exiftool binary (if not provided, will search path to find it)"""
self.logger = logger
self.log = LOG.getChild(self.__class__.__name__)
self._exiftool = exiftool or get_exiftool_path()
if hasattr(self, "_process_running") and self._process_running:
# already running
if exiftool is not None and exiftool != self._exiftool:
self.logger.warning(
self.log.warning(
f"exiftool subprocess already running, "
f"ignoring exiftool={exiftool}"
)
return
self._process_running = False
self._exiftool = exiftool or get_exiftool_path()
self._start_proc()
@property
def process(self):
""" return the exiftool subprocess """
"""return the exiftool subprocess"""
if self._process_running:
return self._process
else:
@ -86,19 +88,19 @@ class _ExifToolProc:
@property
def pid(self):
""" return process id (PID) of the exiftool process """
"""return process id (PID) of the exiftool process"""
return self._process.pid
@property
def exiftool(self):
""" return path to exiftool process """
"""return path to exiftool process"""
return self._exiftool
def _start_proc(self):
""" start exiftool in batch mode """
"""start exiftool in batch mode"""
if self._process_running:
self.logger.warning("exiftool already running: {self._process}")
self.log.warning("exiftool already running: {self._process}")
return
# open exiftool process
@ -123,7 +125,7 @@ class _ExifToolProc:
EXIFTOOL_PROCESSES.append(self)
def _stop_proc(self):
""" stop the exiftool process if it's running, otherwise, do nothing """
"""stop the exiftool process if it's running, otherwise, do nothing"""
if not self._process_running:
return
@ -146,9 +148,15 @@ class _ExifToolProc:
class ExifTool:
""" Basic exiftool interface for reading and writing EXIF tags """
"""Basic exiftool interface for reading and writing EXIF tags"""
def __init__(self, filepath, exiftool=None, overwrite=True, flags=None, logger=logging.getLogger()):
def __init__(
self,
filepath,
exiftool=None,
overwrite=True,
flags=None,
):
"""Create ExifTool object
Args:
@ -168,7 +176,7 @@ class ExifTool:
self.error = None
# if running as a context manager, self._context_mgr will be True
self._context_mgr = False
self._exiftoolproc = _ExifToolProc(exiftool=exiftool, logger=logger)
self._exiftoolproc = _ExifToolProc(exiftool=exiftool)
self._read_exif()
@property
@ -318,12 +326,12 @@ class ExifTool:
@property
def pid(self):
""" return process id (PID) of the exiftool process """
"""return process id (PID) of the exiftool process"""
return self._process.pid
@property
def version(self):
""" returns exiftool version """
"""returns exiftool version"""
ver, _, _ = self.run_commands("-ver", no_file=True)
return ver.decode("utf-8")
@ -361,12 +369,12 @@ class ExifTool:
return exifdict
def json(self):
""" returns JSON string containing all EXIF tags and values from exiftool """
"""returns JSON string containing all EXIF tags and values from exiftool"""
json, _, _ = self.run_commands("-json")
return json
def _read_exif(self):
""" read exif data from file """
"""read exif data from file"""
data = self.asdict()
self.data = {k: v for k, v in data.items()}
@ -387,23 +395,24 @@ class ExifTool:
class ExifToolCaching(ExifTool):
""" Basic exiftool interface for reading and writing EXIF tags, with caching.
Use this only when you know the file's EXIF data will not be changed by any external process.
Creates a singleton cached ExifTool instance """
"""Basic exiftool interface for reading and writing EXIF tags, with caching.
Use this only when you know the file's EXIF data will not be changed by any external process.
_singletons = {}
Creates a singleton cached ExifTool instance"""
def __new__(cls, filepath, exiftool=None, logger=logging.getLogger()):
""" create new object or return instance of already created singleton """
_singletons: dict[Path, ExifTool] = {}
def __new__(cls, filepath, exiftool=None):
"""create new object or return instance of already created singleton"""
if filepath not in cls._singletons:
cls._singletons[filepath] = _ExifToolCaching(filepath,
exiftool=exiftool, logger=logger)
cls._singletons[filepath] = _ExifToolCaching(
filepath, exiftool=exiftool
)
return cls._singletons[filepath]
class _ExifToolCaching(ExifTool):
def __init__(self, filepath, exiftool=None, logger=logging.getLogger()):
def __init__(self, filepath, exiftool=None):
"""Create read-only ExifTool object that caches values
Args:
@ -415,8 +424,9 @@ class _ExifToolCaching(ExifTool):
"""
self._json_cache = None
self._asdict_cache = {}
super().__init__(filepath, exiftool=exiftool, overwrite=False,
flags=None, logger=logger)
super().__init__(
filepath, exiftool=exiftool, overwrite=False, flags=None
)
def run_commands(self, *commands, no_file=False):
if commands[0] not in ["-json", "-ver"]:
@ -453,7 +463,6 @@ class _ExifToolCaching(ExifTool):
return self._asdict_cache[tag_groups][normalized]
def flush_cache(self):
""" Clear cached data so that calls to json or asdict return fresh data """
"""Clear cached data so that calls to json or asdict return fresh data"""
self._json_cache = None
self._asdict_cache = {}

101
ordigi/geolocation.py Normal file
View File

@ -0,0 +1,101 @@
from os import path
import geopy
from geopy.geocoders import Nominatim, options
from ordigi import LOG
from ordigi import config
__KEY__ = None
class GeoLocation:
    """Look up geolocation information for media objects."""

    def __init__(
        self,
        geocoder='Nominatim',
        prefer_english_names=False,
        timeout=options.default_timeout,
    ):
        """
        :param geocoder: backend name; only 'Nominatim' is supported
        :param prefer_english_names: request English place names on reverse lookup
        :param timeout: geopy timeout, stored for callers' use
        """
        self.geocoder = geocoder
        self.log = LOG.getChild(self.__class__.__name__)
        self.prefer_english_names = prefer_english_names
        self.timeout = timeout

    def coordinates_by_name(self, name, timeout=options.default_timeout):
        """Get coordinates from given location name

        :returns: dict with 'latitude'/'longitude', or None when not found
        :raises NameError: for an unsupported geocoder
        """
        # NOTE(review): self.timeout is not used as the default here --
        # confirm whether callers are expected to pass it explicitly.
        geocoder = self.geocoder
        if geocoder == 'Nominatim':
            locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
            geolocation_info = locator.geocode(name)
            if geolocation_info is not None:
                return {
                    'latitude': geolocation_info.latitude,
                    'longitude': geolocation_info.longitude,
                }
        else:
            raise NameError(geocoder)

        return None

    def place_name(self, lat, lon, timeout=options.default_timeout):
        """Get place names from coordinates.

        :returns: dict of address parts; always contains a 'default' key
            (None when nothing was found)
        :raises NameError: for an unsupported geocoder
        """
        lookup_place_name_default = {'default': None}
        if lat is None or lon is None:
            return lookup_place_name_default

        # Convert lat/lon to floats
        if not isinstance(lat, float):
            lat = float(lat)
        if not isinstance(lon, float):
            lon = float(lon)

        lookup_place_name = {}
        geocoder = self.geocoder
        if geocoder == 'Nominatim':
            geolocation_info = self.lookup_osm(lat, lon, timeout)
        else:
            raise NameError(geocoder)

        if geolocation_info is not None and 'address' in geolocation_info:
            address = geolocation_info['address']
            # gh-386 adds support for town
            # taking precedence after city for backwards compatability
            for loc in ['city', 'town', 'village', 'state', 'country']:
                if loc in address:
                    lookup_place_name[loc] = address[loc]
                    # In many cases the desired key is not available so we
                    # set the most specific as the default.
                    if 'default' not in lookup_place_name:
                        lookup_place_name['default'] = address[loc]

        if 'default' not in lookup_place_name:
            lookup_place_name = lookup_place_name_default

        return lookup_place_name

    def lookup_osm(self, lat, lon, timeout=options.default_timeout):
        """Get Geolocation address data from latitude and longitude"""
        locator_reverse = None
        try:
            locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
            coords = (lat, lon)
            if self.prefer_english_names:
                lang = 'en'
            else:
                lang = 'local'
            try:
                locator_reverse = locator.reverse(coords, language=lang)
            except (geopy.exc.GeocoderUnavailable, geopy.exc.GeocoderTimedOut) as e:
                # BUGFIX: `except A or B` evaluated to A only, so timeouts
                # escaped uncaught; a tuple catches both exception types.
                self.log.error(e)
        # Fix *** TypeError: `address` must not be None
        except (TypeError, ValueError) as e:
            self.log.error(e)
        else:
            if locator_reverse is not None:
                return locator_reverse.raw

        return None

187
ordigi/images.py Normal file
View File

@ -0,0 +1,187 @@
"""
The image module contains the :class:`Images` class, which is used to track
image objects (JPG, DNG, etc.).
.. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
"""
import imghdr
import os
import imagehash
import numpy as np
from PIL import Image as img
from PIL import UnidentifiedImageError
from ordigi import LOG
# HEIC extension support (experimental, not tested)
PYHEIF = False
try:
from pyheif_pillow_opener import register_heif_opener
PYHEIF = True
# Allow to open HEIF/HEIC image from pillow
register_heif_opener()
except ImportError as e:
LOG.info(e)
class Image:
    """A single image file plus the perceptual-hash size used to compare it."""

    def __init__(self, img_path, hash_size=8):
        self.img_path = img_path
        self.hash_size = hash_size

    def is_image(self):
        """Check whether the file is an image.
        :returns: bool
        """
        # gh-4: imghdr gives a quick header-based answer but does not
        # detect every variant (https://bugs.python.org/issue28591).
        if imghdr.what(self.img_path) is not None:
            return True

        # Pillow fallback (see https://github.com/jmathai/elodie/issues/281).
        # Pillow only reads the file header to determine the format --
        # the raster data is not decoded -- so this check stays cheap.
        try:
            image = img.open(self.img_path)
        except (IOError, UnidentifiedImageError):
            return False

        return image.format is not None

    def get_hash(self):
        """Get image hash"""
        try:
            with img.open(self.img_path) as image:
                return imagehash.average_hash(image, self.hash_size).hash
        except (OSError, UnidentifiedImageError):
            return None
class Images:
    """A set of :class:`Image` objects, with duplicate and similarity search.

    :param images: iterable of Image instances
    :param int hash_size: perceptual-hash size forwarded to imagehash
    """

    #: Valid extensions for image files.
    extensions = (
        'arw',
        'cr2',
        'dng',
        'gif',
        'heic',
        'jpeg',
        'jpg',
        'nef',
        'png',
        'rw2',
    )

    def __init__(self, images, hash_size=8):
        self.images = images
        self.duplicates = []
        self.hash_size = hash_size
        self.log = LOG.getChild(self.__class__.__name__)
        if not PYHEIF:
            # HEIC support is optional; report once per instance.
            self.log.info("No module named 'pyheif_pillow_opener'")

    def get_images_hashes(self):
        """Yield the average hash of every image in self.images."""
        # Searching for duplicates.
        for image in self.images:
            with img.open(image.img_path) as i:
                yield imagehash.average_hash(i, self.hash_size)

    def find_duplicates(self, img_path):
        """Return a list of duplicate paths detected by identical hashes."""
        # NOTE(review): both the logged path and the appended/stored value
        # are always the *img_path* argument, never the path of the image
        # being iterated, so the result can only ever contain img_path
        # repeated -- presumably each iterated image's own path was
        # intended; confirm against callers before changing.
        duplicates = []
        hashes = {}
        for temp_hash in self.get_images_hashes():
            if temp_hash in hashes:
                self.log.info(
                    "Duplicate {} \nfound for image {}\n".format(
                        img_path, hashes[temp_hash]
                    )
                )
                duplicates.append(img_path)
            else:
                hashes[temp_hash] = img_path

        return duplicates

    def remove_duplicates(self, duplicates):
        """Remove duplicate files"""
        for duplicate in duplicates:
            try:
                os.remove(duplicate)
            except OSError as error:
                # Best effort: log and continue with the remaining files.
                self.log.error(error)

    def remove_duplicates_interactive(self, duplicates):
        """Remove duplicate files: interactive mode"""
        if len(duplicates) != 0:
            answer = input(f"Do you want to delete these {duplicates} images? Y/n: ")
            if answer.strip().lower() == 'y':
                self.remove_duplicates(duplicates)
                self.log.info('Duplicates images deleted successfully!')
        else:
            self.log.info("No duplicates found")

    def diff(self, hash1, hash2):
        """Number of differing bits between two hash arrays."""
        return np.count_nonzero(hash1 != hash2)

    def similarity(self, img_diff):
        """Similarity rate in %"""
        threshold_img = img_diff / (self.hash_size ** 2)
        similarity_img = round((1 - threshold_img) * 100)

        return similarity_img

    def find_similar(self, image0, similarity=80):
        """
        Find similar images

        :param image0: reference Image to compare against
        :param similarity: minimum similarity percentage to report
        :returns: img_path generator
        """
        hash1 = image0.get_hash()
        if hash1 is None:
            return

        self.log.info(f"Finding similar images to {image0.img_path}")

        # Convert the similarity percentage into a max differing-bit count.
        threshold = 1 - similarity / 100
        diff_limit = int(threshold * (self.hash_size ** 2))

        for image in self.images:
            if not image.img_path.is_file():
                continue
            if image.img_path == image0.img_path:
                continue
            hash2 = image.get_hash()
            # Be sure that hash are not None
            if hash2 is None:
                continue
            img_diff = self.diff(hash1, hash2)
            if img_diff <= diff_limit:
                similarity_img = self.similarity(img_diff)
                self.log.info(
                    f"{image.img_path} image found {similarity_img}% similar to {image0.img_path}"
                )
                yield image.img_path

61
ordigi/log.py Normal file
View File

@ -0,0 +1,61 @@
"""Logging module"""
import logging
def get_logger(name, level=30):
    """Return the logger *name* with its threshold set to *level*."""
    instance = logging.getLogger(name)
    instance.setLevel(level)
    return instance
def log_format(level):
    """Record format string; the logger name is shown only at DEBUG (<=10)."""
    if level <= 10:
        return '%(levelname)s:%(name)s:%(message)s'
    return '%(levelname)s:%(message)s'
def set_formatter(handler, level):
    """Attach a level-appropriate formatter to *handler*."""
    # Logger name is only included at DEBUG level.
    fmt = '%(levelname)s:%(name)s:%(message)s' if level <= 10 else '%(levelname)s:%(message)s'
    handler.setFormatter(logging.Formatter(fmt))
def console(logger, level=30):
    """Attach a stream handler at *level* to *logger* and raise its level."""
    logger.setLevel(level)

    stream = logging.StreamHandler()
    stream.setLevel(level)
    # Logger name is only included at DEBUG level.
    fmt = '%(levelname)s:%(name)s:%(message)s' if level <= 10 else '%(levelname)s:%(message)s'
    stream.setFormatter(logging.Formatter(fmt))

    # add the handlers to logger
    logger.addHandler(stream)
def file_logger(logger, file, level=30):
    """Attach a file handler at *level* to *logger* and raise its level.

    BUGFIX: the original called set_formatter(handler, log_format(level)),
    passing a format *string* where an int level was expected; the nested
    log_format call then compared str > int and raised TypeError on every
    invocation.  Build the formatter directly from the level instead.

    :param logger: logger to configure
    :param file: path of the log file
    :param level: int logging level for both logger and handler
    """
    logger.setLevel(level)
    handler = logging.FileHandler(file)
    handler.setLevel(level)
    # Logger name is only included at DEBUG level.
    fmt = '%(levelname)s:%(name)s:%(message)s' if level <= 10 else '%(levelname)s:%(message)s'
    handler.setFormatter(logging.Formatter(fmt))
    # add the handlers to logger
    logger.addHandler(handler)
def get_level(quiet=False, verbose=False, debug=False, num=None):
    """Return int logging level from command line args.

    Precedence: explicit numeric string > debug > verbose > quiet,
    defaulting to WARNING.
    """
    if num and num.isnumeric():
        # BUGFIX: the original returned int(verbose) here, discarding the
        # user-supplied numeric level.
        return int(num)

    if debug:
        return int(logging.getLevelName('DEBUG'))

    if verbose:
        return int(logging.getLevelName('INFO'))

    if quiet:
        return int(logging.getLevelName('ERROR'))

    return int(logging.getLevelName('WARNING'))

770
ordigi/media.py Normal file
View File

@ -0,0 +1,770 @@
import mimetypes
import os
import re
import sys
from dateutil import parser
import inquirer
from ordigi import LOG
from ordigi.exiftool import ExifTool, ExifToolCaching
from ordigi import utils
from ordigi import request
class ExifMetadata:
    """Map semantic metadata keys (dates, camera, gps...) to exiftool tags."""

    def __init__(self, file_path, ignore_tags=None):
        """
        :param file_path: path of the media file
        :param ignore_tags: set of regexes matching exiftool tags to drop
        """
        self.file_path = file_path
        if ignore_tags is None:
            ignore_tags = set()
        self.ignore_tags = ignore_tags
        self.log = LOG.getChild(self.__class__.__name__)
        self.tags_keys = self.get_tags()

    def get_tags(self) -> dict:
        """Get exif tags groups in dict"""
        tags_keys = {}
        tags_keys['date_original'] = [
            'EXIF:DateTimeOriginal',
            'H264:DateTimeOriginal',
            'QuickTime:ContentCreateDate',
        ]
        tags_keys['date_created'] = [
            'EXIF:CreateDate',
            'QuickTime:CreationDate',
            'QuickTime:CreateDate',
            'QuickTime:CreationDate-und-US',
            'QuickTime:MediaCreateDate',
        ]
        tags_keys['date_modified'] = [
            'EXIF:ModifyDate',
            'QuickTime:ModifyDate',
        ]
        tags_keys['file_modify_date'] = [
            'File:FileModifyDate',
        ]
        tags_keys['camera_make'] = ['EXIF:Make', 'QuickTime:Make']
        tags_keys['camera_model'] = ['EXIF:Model', 'QuickTime:Model']
        tags_keys['album'] = ['XMP-xmpDM:Album', 'XMP:Album']
        tags_keys['title'] = ['XMP:Title', 'XMP:DisplayName']
        tags_keys['latitude'] = [
            'EXIF:GPSLatitude',
            'XMP:GPSLatitude',
            # 'QuickTime:GPSLatitude',
            'Composite:GPSLatitude',
        ]
        tags_keys['longitude'] = [
            'EXIF:GPSLongitude',
            'XMP:GPSLongitude',
            # 'QuickTime:GPSLongitude',
            'Composite:GPSLongitude',
        ]
        tags_keys['latitude_ref'] = ['EXIF:GPSLatitudeRef']
        tags_keys['longitude_ref'] = ['EXIF:GPSLongitudeRef']
        tags_keys['original_name'] = ['EXIF:OriginalFileName', 'XMP:OriginalFileName']

        # Remove ignored tag from list.
        # BUGFIX: rebuild each list instead of `del` while enumerating --
        # deleting during iteration skipped the element that followed every
        # removal, leaving some ignored tags in place.
        for tag_regex in self.ignore_tags:
            for key, tags in tags_keys.items():
                tags_keys[key] = [tag for tag in tags if not re.match(tag_regex, tag)]

        return tags_keys

    def get_date_format(self, value):
        """
        Formatting date attribute.
        :returns: datetime object or None
        """
        # We need to parse a string to datetime format.
        # EXIF DateTimeOriginal and EXIF DateTime are both stored
        # in %Y:%m:%d %H:%M:%S format
        if value is None:
            return None

        try:
            # correct nasty formated date
            regex = re.compile(r'(\d{4}):(\d{2}):(\d{2})[-_ .]')
            if re.match(regex, value):
                value = re.sub(regex, r'\g<1>-\g<2>-\g<3> ', value)
            else:
                regex = re.compile(r'(\d{4})(\d{2})(\d{2})[-_ .]?(\d{2})?(\d{2})?(\d{2})?')
                if re.match(regex, value):
                    value = re.sub(regex, r'\g<1>-\g<2>-\g<3> \g<4>:\g<5>:\g<6>', value)
            return parser.parse(value)
        except BaseException as e:
            # NOTE: the original `except BaseException or ParserError`
            # evaluated to BaseException alone; kept equally broad to
            # preserve behavior, though narrowing to
            # (ValueError, parser.ParserError) is probably intended -- TODO.
            # BUGFIX: the original log call passed *value* with no format
            # placeholder, which made logging raise a formatting error.
            self.log.warning('%s: %s', e.args, value)
            return None
class ReadExif(ExifMetadata):
    """Read exif metadata from file"""

    def __init__(
        self,
        file_path,
        exif_metadata=None,
        cache=True,
        ignore_tags=None,
    ):
        """
        :param file_path: path of the media file
        :param exif_metadata: pre-fetched metadata dict (skips exiftool)
        :param cache: use the caching exiftool interface when True
        :param ignore_tags: set of regexes matching exiftool tags to drop
        """
        super().__init__(file_path, ignore_tags)

        # Options
        self.log = LOG.getChild(self.__class__.__name__)
        self.cache = cache

        if exif_metadata:
            self.exif_metadata = exif_metadata
        elif self.cache:
            self.exif_metadata = self.get_exif_metadata_caching()
        else:
            self.exif_metadata = self.get_exif_metadata()

    def get_exif_metadata(self):
        """Get metadata from exiftool, bypassing the cache."""
        # BUGFIX: the non-cache path also used ExifToolCaching, which made
        # cache=False a no-op -- assumes ExifTool exposes the same asdict()
        # as its caching subclass; confirm against ordigi.exiftool.
        return ExifTool(self.file_path).asdict()

    def get_exif_metadata_caching(self):
        """Get metadata from exiftool, using the cached singleton."""
        return ExifToolCaching(self.file_path).asdict()

    def get_key_values(self, key):
        """
        Get tags values of a key

        :returns: str or None if no exif tag
        """
        if self.exif_metadata is None:
            return None

        for tag in self.tags_keys[key]:
            if tag in self.exif_metadata:
                yield self.exif_metadata[tag]

    def get_coordinates(self, key, value):
        """Get latitude or longitude value

        :param str key: Type of coordinate to get. Either "latitude" or
            "longitude".
        :returns: float or None
        """
        if value is None:
            return None

        if isinstance(value, str) and len(value) == 0:
            # If exiftool GPS output is empty, the data returned will be a str
            # with 0 length.
            # https://github.com/jmathai/elodie/issues/354
            return None

        # Cast coordinate to a float due to a bug in exiftool's
        # -json output format.
        # https://github.com/jmathai/elodie/issues/171
        # http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,7952.0.html # noqa
        this_coordinate = float(value)

        direction_multiplier = 1.0
        # when self.set_gps_ref != True
        if key == 'latitude':
            if 'EXIF:GPSLatitudeRef' in self.exif_metadata:
                if self.exif_metadata['EXIF:GPSLatitudeRef'] == 'S':
                    direction_multiplier = -1.0
        elif key == 'longitude':
            if 'EXIF:GPSLongitudeRef' in self.exif_metadata:
                if self.exif_metadata['EXIF:GPSLongitudeRef'] == 'W':
                    direction_multiplier = -1.0

        return this_coordinate * direction_multiplier
class WriteExif(ExifMetadata):
    """Write exif metadata to a file via exiftool."""

    def __init__(
        self,
        file_path,
        metadata,
        ignore_tags=None,
    ):
        super().__init__(file_path, ignore_tags)

        self.metadata = metadata
        self.log = LOG.getChild(self.__class__.__name__)

    def set_value(self, tag, value):
        """Set the value of a single exif tag.

        :returns: bool, True on success
        """
        # TODO overwrite mode check if fail
        return ExifTool(self.file_path, overwrite=True).setvalue(tag, value)

    def set_key_values(self, key, value):
        """Set every exif tag mapped to *key* to *value*.

        :returns: bool, True only if all tags were written
        """
        status = True
        for tag in self.tags_keys[key]:
            if not self.set_value(tag, value):
                status = False
        return status

    def set_date_media(self, time):
        """
        Set the date/time a photo was taken.

        :param datetime time: datetime object of when the photo was taken
        :returns: bool
        """
        if time is None:
            return False

        formatted_time = time.strftime('%Y:%m:%d %H:%M:%S')
        status = self.set_value('date_original', formatted_time)
        if not status:
            # exif attribute date_original does not exist on this file
            # type; fall back to date_created
            status = self.set_value('date_created', formatted_time)

        return status

    def set_coordinates(self, latitude, longitude):
        """Write GPS coordinates, with hemisphere refs when configured.

        :returns: bool, True only if every tag was written
        """
        status = []
        if self.metadata['latitude_ref']:
            # BUGFIX: the hemisphere must be derived from the sign *before*
            # taking abs(); the old code called abs() first, so the ref was
            # always 'N'.
            status.append(
                self.set_value('latitude_ref', 'N' if latitude > 0 else 'S')
            )
            latitude = abs(latitude)
        status.append(self.set_value('latitude', latitude))

        if self.metadata['longitude_ref']:
            # BUGFIX: wrote 'latitude_ref' instead of 'longitude_ref' for the
            # eastern hemisphere (copy/paste error).
            status.append(
                self.set_value('longitude_ref', 'E' if longitude > 0 else 'W')
            )
            longitude = abs(longitude)
        status.append(self.set_value('longitude', longitude))

        return all(status)

    def set_album_from_folder(self):
        """Set the album attribute based on the leaf folder name.

        :returns: bool
        """
        # TODO use tag key
        return self.set_value('Album', self.file_path.parent.name)
class Media(ReadExif):
    """
    Extract metadata from exiftool and sort them into a dict structure.
    """

    d_coordinates = {'latitude': 'latitude_ref', 'longitude': 'longitude_ref'}

    def __init__(
        self,
        file_path,
        src_dir,
        album_from_folder=False,
        ignore_tags=None,
        interactive=False,
        cache=True,
        checksum=None,
        use_date_filename=False,
        use_file_dates=False,
    ):
        super().__init__(
            file_path,
            # BUGFIX: cache was hard-coded to True, silently ignoring the
            # caller's cache option
            cache=cache,
            ignore_tags=ignore_tags,
        )
        self.src_dir = src_dir

        self.album_from_folder = album_from_folder
        self.cache = cache
        if checksum:
            self.checksum = checksum
        else:
            self.checksum = utils.checksum(file_path)
        self.interactive = interactive
        self.log = LOG.getChild(self.__class__.__name__)
        self.metadata = None
        self.use_date_filename = use_date_filename
        self.use_file_dates = use_file_dates

        self.theme = request.load_theme()
        # BUGFIX: interactive code paths referenced self.prompt / self.input
        # without ever creating them (AttributeError in interactive mode)
        self.input = request.Input()

        self.loc_keys = (
            'latitude',
            'longitude',
            'location',
            'latitude_ref',
            'longitude_ref',
            'city',
            'state',
            'country',
        )

    def get_mimetype(self):
        """
        Get the mimetype of the file.

        :returns: str or None
        """
        # TODO add to metadata
        # guess_type() always returns a (type, encoding) tuple; type is
        # None when the extension is unknown, so no extra None check needed
        return mimetypes.guess_type(self.file_path)[0]

    def _get_date_media_interactive(self, choices, default):
        """Ask the user to resolve a date conflict.

        :returns: datetime or None
        """
        print(f"Date conflict for file: {self.file_path}")
        choices_list = [
            inquirer.List(
                'date_list',
                message="Choice appropriate original date",
                choices=choices,
                default=default,
            ),
        ]
        answers = inquirer.prompt(choices_list, theme=self.theme)
        if not answers:
            sys.exit()

        if not answers['date_list']:
            # BUGFIX: was self.prompt.text(), but self.prompt never existed
            answer = self.input.text("date")
            return self.get_date_format(answer)

        return answers['date_list']

    def get_date_media(self):
        """
        Get the date taken from self.metadata or filename.

        Resolution order: EXIF date_original, then (optionally) a date
        embedded in the filename, then date_created / date_modified, then
        the file system date, finally an interactive prompt.

        :returns: datetime or None
        """
        if self.metadata is None:
            return None

        filename = self.metadata['filename']
        stem = os.path.splitext(filename)[0]
        if self.metadata['original_name']:
            date_filename, _, _ = utils.get_date_from_string(self.metadata['original_name'])
        else:
            date_filename, _, _ = utils.get_date_from_string(stem)
        self.log.debug(f'date_filename: {date_filename}')

        date_original = self.metadata['date_original']
        date_created = self.metadata['date_created']
        date_modified = self.metadata['date_modified']
        file_modify_date = self.metadata['file_modify_date']
        if date_original:
            if date_filename and date_filename != date_original:
                # 60s tolerance between embedded date and filename date
                timedelta = abs(date_original - date_filename)
                if timedelta.total_seconds() > 60:
                    self.log.warning(
                        f"(unknown) time mark is different from {date_original}"
                    )
                    if self.interactive:
                        # Ask for keep date taken, filename time, or neither
                        choices = [
                            (f"date original:'{date_original}'", date_original),
                            (f"date filename:'{date_filename}'", date_filename),
                            ("custom", None),
                        ]
                        default = f'{date_original}'
                        return self._get_date_media_interactive(choices, default)

            return date_original

        self.log.warning(f"could not find date original for {self.file_path}")
        if self.use_date_filename and date_filename:
            self.log.info(
                f"use date from filename:{date_filename} for {self.file_path}"
            )
            if date_created and date_filename > date_created:
                # Filename date is newer than the creation date: prefer
                # date_created (60s tolerance before warning)
                timedelta = abs(date_created - date_filename)
                if timedelta.total_seconds() > 60:
                    self.log.warning(
                        f"(unknown) time mark is more recent than {date_created}"
                    )
                return date_created

            if self.interactive:
                choices = [
                    (f"date filename:'{date_filename}'", date_filename),
                    (f"date created:'{date_created}'", date_created),
                    ("custom", None),
                ]
                default = date_filename
                return self._get_date_media_interactive(choices, default)

            return date_filename

        if date_created:
            self.log.warning(
                f"use date created:{date_created} for {self.file_path}"
            )
            return date_created

        if date_modified:
            self.log.warning(
                f"use date modified:{date_modified} for {self.file_path}"
            )
            return date_modified

        if self.use_file_dates:
            if file_modify_date:
                self.log.warning(
                    f"use date modified:{file_modify_date} for {self.file_path}"
                )
                return file_modify_date
        elif self.interactive:
            choices = []
            if date_filename:
                choices.append((f"date filename:'{date_filename}'", date_filename))
            if date_created:
                choices.append((f"date created:'{date_created}'", date_created))
            if date_modified:
                choices.append((f"date modified:'{date_modified}'", date_modified))
            if file_modify_date:
                choices.append(
                    (f"date modified:'{file_modify_date}'", file_modify_date)
                )
            choices.append(("custom", None))
            default = date_filename
            return self._get_date_media_interactive(choices, default)

    def _set_album(self, album, folder):
        """Ask which album value to keep (exif album vs folder name).

        :returns: the chosen album string (user text input when 'custom')
        """
        print(f"Metadata conflict for file: {self.file_path}")
        choices_list = [
            inquirer.List(
                'album',
                message=f"Exif album is already set to {album}, choices",
                choices=[
                    (f"album:'{album}'", album),
                    (f"folder:'{folder}'", folder),
                    ("custom", None),
                ],
                default=f'{album}',
            ),
        ]
        answers = inquirer.prompt(choices_list, theme=self.theme)
        if not answers:
            sys.exit()

        if not answers['album']:
            return self.input.text("album")

        return answers['album']

    def _set_metadata_from_exif(self):
        """
        Fill self.metadata for every logical key from the exif tags,
        using the first tag that yields usable data.
        """
        if not self.exif_metadata:
            return

        for key in self.tags_keys:
            formated_data = None
            for value in self.get_key_values(key):
                if 'date' in key:
                    formated_data = self.get_date_format(value)
                elif key in ('latitude', 'longitude'):
                    formated_data = self.get_coordinates(key, value)
                else:
                    if value is not None and value != '':
                        formated_data = value
                    else:
                        formated_data = None
                if formated_data:
                    # Use this data and break
                    break

            self.metadata[key] = formated_data

    def _set_metadata_from_db(self, db, relpath):
        """Fill self.metadata from the collection database.

        :returns: the LocationId stored for *relpath*
        """
        # Get metadata from db
        formated_data = None
        for key in self.tags_keys:
            if key in (
                'latitude',
                'longitude',
                'latitude_ref',
                'longitude_ref',
                'file_path',
            ):
                # Location data is resolved separately via LocationId
                continue

            label = utils.snake2camel(key)
            value = db.get_metadata(relpath, label)
            if 'date' in key:
                formated_data = self.get_date_format(value)
            else:
                formated_data = value
            self.metadata[key] = formated_data
        for key in 'src_dir', 'subdirs', 'filename':
            label = utils.snake2camel(key)
            formated_data = db.get_metadata(relpath, label)
            self.metadata[key] = formated_data

        return db.get_metadata(relpath, 'LocationId')

    def set_location_from_db(self, location_id, db):
        """Fill the location keys of self.metadata from the database."""
        self.metadata['location_id'] = location_id

        if location_id:
            for key in self.loc_keys:
                # use str to convert non string format data like latitude and
                # longitude
                self.metadata[key] = str(
                    db.get_location_data(location_id, utils.snake2camel(key))
                )
        else:
            for key in self.loc_keys:
                self.metadata[key] = None

    def set_location_from_coordinates(self, loc):
        """Fill the location keys by reverse-geocoding the coordinates."""
        self.metadata['location_id'] = None

        if loc:
            place_name = loc.place_name(
                self.metadata['latitude'], self.metadata['longitude']
            )
            # BUGFIX: missing f prefix, the message logged the literal
            # placeholder instead of the value
            self.log.debug(f"location: {place_name['default']}")
            for key in ('city', 'state', 'country', 'location'):
                # mask = 'city'
                # place_name = {'default': u'Sunnyvale', 'city-random': u'Sunnyvale'}
                if key in place_name:
                    self.metadata[key] = place_name[key]
                elif key == 'location':
                    self.metadata[key] = place_name['default']
                else:
                    self.metadata[key] = None
        else:
            for key in self.loc_keys:
                self.metadata[key] = None

    def _set_album_from_folder(self):
        """Fill the album from the parent folder name, prompting on conflict."""
        album = self.metadata['album']
        folder = self.file_path.parent.name
        if album and album != '':
            if self.interactive:
                # BUGFIX: _set_album() returns the chosen album string, but
                # the old code compared it against 'c'/'a'/'f' letters, so
                # the answer was always discarded.
                answer = self._set_album(album, folder)
                if answer:
                    self.metadata['album'] = answer

        if not album or album == '':
            self.metadata['album'] = folder

    def get_metadata(self, root, loc=None, db=None, cache=False):
        """
        Populate self.metadata from the database (when the checksum
        matches) or from exif. All keys will be present and have a value
        of None if not obtained.
        """
        self.metadata = {}
        self.metadata['checksum'] = self.checksum

        db_checksum = False
        location_id = None
        if cache and db and str(self.file_path).startswith(str(root)):
            relpath = os.path.relpath(self.file_path, root)
            db_checksum = db.get_checksum(relpath)
        if db_checksum:
            # Checksum matches: trust the cached database entry
            location_id = self._set_metadata_from_db(db, relpath)
            self.set_location_from_db(location_id, db)
        else:
            self.metadata['src_dir'] = str(self.src_dir)
            self.metadata['subdirs'] = str(
                self.file_path.relative_to(self.src_dir).parent
            )
            self.metadata['filename'] = self.file_path.name

            self._set_metadata_from_exif()
            self.set_location_from_coordinates(loc)

        self.metadata['date_media'] = self.get_date_media()
        self.metadata['location_id'] = location_id

        if self.album_from_folder:
            self._set_album_from_folder()

    def has_exif_data(self):
        """Check if the file has a date original in its metadata.

        :returns: bool
        """
        if not self.metadata:
            return False

        return bool(self.metadata.get('date_original'))
class Medias:
    """
    Collect media files from the collection paths and build their
    metadata dictionaries with exiftool.
    """

    PHOTO = ('arw', 'cr2', 'dng', 'gif', 'heic', 'jpeg', 'jpg', 'nef', 'png', 'rw2')
    AUDIO = ('m4a',)
    VIDEO = ('avi', 'm4v', 'mov', 'mp4', 'mpg', 'mpeg', '3gp', 'mts')

    extensions = PHOTO + AUDIO + VIDEO

    def __init__(
        self,
        paths,
        root,
        exif_options,
        checksums=None,
        db=None,
        interactive=False,
    ):
        # Modules
        self.db = db
        self.paths = paths

        # Arguments
        self.root = root

        # Options
        self.checksums = checksums if checksums else {}
        self.exif_opt = exif_options
        self.ignore_tags = self.exif_opt['ignore_tags']
        self.interactive = interactive
        self.log = LOG.getChild(self.__class__.__name__)

        # Attributes: per-file media datas
        self.datas = {}
        self.theme = request.load_theme()

    def get_media(self, file_path, src_dir, checksum=None):
        """Build a Media instance configured from the exif options."""
        options = self.exif_opt
        return Media(
            file_path,
            src_dir,
            options['album_from_folder'],
            options['ignore_tags'],
            self.interactive,
            options['cache'],
            checksum,
            options['use_date_filename'],
            options['use_file_dates'],
        )

    def get_media_data(self, file_path, src_dir, loc=None):
        """Get media class instance with metadata populated."""
        known_checksum = self.checksums.get(file_path) if self.checksums else None
        media = self.get_media(file_path, src_dir, known_checksum)
        media.get_metadata(
            self.root, loc, self.db.sqlite, self.exif_opt['cache']
        )
        return media

    def get_metadata(self, src_path, src_dir, loc=None):
        """Get only the metadata dict of a single file."""
        return self.get_media_data(src_path, src_dir, loc).metadata

    def get_paths(self, src_dirs, imp=False):
        """Yield (src_dir, file_path) pairs for every media file found."""
        for src_dir in src_dirs:
            src_dir = self.paths.check(src_dir)
            if src_dir.is_file():
                # A single file was given directly
                yield src_dir.parent, src_dir
                continue

            # Get medias and src_dirs
            for src_path in self.paths.get_paths_list(src_dir):
                if self.root not in src_path.parents and not imp:
                    self.log.error(f"""{src_path} not in {self.root}
collection, use `ordigi import`""")
                    sys.exit(1)
                yield src_dir, src_path

    def get_medias_datas(self, src_dirs, imp=False, loc=None):
        """Yield (path, Media) pairs with populated metadata."""
        for src_dir, src_path in self.get_paths(src_dirs, imp=imp):
            # Get file metadata
            yield src_path, self.get_media_data(src_path, src_dir, loc=loc)

    def get_metadatas(self, src_dirs, imp=False, loc=None):
        """Yield (path, metadata dict) pairs."""
        for src_dir, src_path in self.get_paths(src_dirs, imp=imp):
            # Get file metadata
            yield src_path, self.get_metadata(src_path, src_dir, loc=loc)

    def update_exif_data(self, metadata, imp=False):
        """Write metadata back to the file's exif tags.

        :returns: bool, True when at least one tag was written
        """
        file_path = self.root / metadata['file_path']
        writer = WriteExif(
            file_path,
            metadata,
            ignore_tags=self.exif_opt['ignore_tags'],
        )

        updated = False
        if imp and metadata['original_name'] in (None, ''):
            writer.set_key_values('original_name', metadata['filename'])
            updated = True
        if self.exif_opt['album_from_folder']:
            writer.set_album_from_folder()
            album = metadata['album']
            if album and album != '':
                writer.set_value('album', album)
                updated = True
        if (
            self.exif_opt['fill_date_original']
            and metadata['date_original'] in (None, '')
        ):
            writer.set_key_values('date_original', metadata['date_media'])
            updated = True

        return updated

79
ordigi/request.py Normal file
View File

@ -0,0 +1,79 @@
import inquirer
from blessed import Terminal
from colorama import init,Fore,Style,Back
term = Terminal()
# TODO allow exit from inquierer prompt
# TODO fix 'opening_prompt_color': term.yellow,
def load_theme():
    """
    Build the custom inquirer theme used by ordigi prompts.
    source:https://github.com/magmax/python-inquirer/blob/master/inquirer/themes.py
    """
    question_style = {
        'brackets_color': term.dodgerblue4,
        'default_color': term.yellow,
    }
    checkbox_style = {
        'selection_icon': '',
        'selected_icon': '',
        'unselected_icon': '',
        'selection_color': term.bold_on_dodgerblue4,
        'selected_color': term.dodgerblue2,
        'unselected_color': term.yellow,
    }
    list_style = {
        'selection_color': term.bold_on_dodgerblue4,
        'selection_cursor': '',
        'unselected_color': term.yellow,
    }
    return inquirer.themes.load_theme_from_dict(
        {
            'Question': question_style,
            'Checkbox': checkbox_style,
            'List': list_style,
        }
    )
class Input:
    """Simple colored text prompt built on colorama."""

    def __init__(self):
        # Initialize colorama so the ANSI color codes also work on Windows
        init()

    def text(self, message):
        """Prompt the user with *message* and return the typed answer."""
        prompt = f'{Fore.BLUE}[{Fore.YELLOW}?{Fore.BLUE}]{Fore.WHITE} {message}: '
        return input(prompt)
# def edit_prompt(self, key: str, value: str) -> str:
# print(f"Date conflict for file: {self.file_path}")
# choices_list = [
# inquirer.List(
# 'edit',
# message=f"Edit '{key}' metadata",
# choices = [
# (f"{key}: '{value}'", value),
# ("custom", None),
# ],
# default=value,
# ),
# ]
# answers = inquirer.prompt(choices_list, theme=self.theme)
# if not answers['edit']:
# prompt = [
# inquirer.Text('edit', message="value"),
# ]
# answers = inquirer.prompt(prompt, theme=self.theme)
# return self.get_date_format(answers['edit'])
# else:
# return answers['date_list']
# choices = [
# (f"date original:'{date_original}'", date_original),
# (f"date filename:'{date_filename}'", date_filename),
# ("custom", None),
# ]
# default = f'{date_original}'
# return self._get_date_media_interactive(choices, default)

99
ordigi/summary.py Normal file
View File

@ -0,0 +1,99 @@
# import pandas as pd
from tabulate import tabulate
class Tables:
    """Accumulate (action, file_path, dest_path) rows and render them."""

    def __init__(self, actions):
        self.actions = actions
        self.table = []
        self.columns = ['action', 'file_path', 'dest_path']

    def append(self, action, file_path=None, dest_path=None):
        """Add one result row to the table."""
        self.table.append((action, file_path, dest_path))

    def sum(self, action=None):
        """Count rows; all of them, or only those matching *action*."""
        if not action:
            return len(self.table)
        return len([row for row in self.table if row[0] == action])

    def tabulate(self):
        """Render the rows as a text table."""
        return tabulate(self.table, headers=self.columns)
class Summary:
    """Result summary of an ordigi program call."""

    def __init__(self, root):
        # NOTE(review): print() also has messages for 'remove_excluded' and
        # 'remove_empty_folders', which are not listed here (while 'remove'
        # is, but has no message) -- confirm the intended action names.
        self.actions = (
            'check',
            'import',
            'remove',
            'sort',
            'update',
        )

        # Set labels
        self.state = ['success', 'errors']

        self.root = root
        self.success_table = Tables(self.actions)
        self.errors_table = Tables(self.actions)
        self.errors = 0

    def append(self, action, success, file_path=None, dest_path=None):
        """Record the result of one action; failures bump the error count."""
        if action:
            if success:
                self.success_table.append(action, file_path, dest_path)
            else:
                self.errors_table.append(action, file_path, dest_path)

        if not success:
            self.errors += 1

    def print(self):
        """Print the summary of successes, then the errors if any."""
        print()
        for action in self.actions:
            nb = self.success_table.sum(action)
            if nb != 0:
                if action == 'check':
                    print(f"SUMMARY: {nb} files checked in {self.root}.")
                elif action == 'import':
                    print(f"SUMMARY: {nb} files imported into {self.root}.")
                elif action == 'sort':
                    print(f"SUMMARY: {nb} files sorted inside {self.root}.")
                elif action == 'remove_excluded':
                    print(f"SUMMARY: {nb} files deleted in {self.root}.")
                elif action == 'remove_empty_folders':
                    print(f"SUMMARY: {nb} empty folders removed in {self.root}.")
                elif action == 'update':
                    print(f"SUMMARY: {nb} files updated in {self.root} database.")

        success = self.success_table.sum()
        if not success and not self.errors:
            print(f"SUMMARY: no action done in {self.root}.")

        errors = self.errors_table.sum()
        if errors:
            print()
            print(f"ERROR: {errors} errors reported for files:")
            # BUGFIX: the error listing printed the success table instead of
            # the errors table
            print(self.errors_table.tabulate())
        elif self.errors:
            # BUGFIX: this branch printed the errors-table count (always 0
            # here) instead of the total error counter
            print(f"ERROR: {self.errors} errors reported.")

189
ordigi/utils.py Normal file
View File

@ -0,0 +1,189 @@
from math import radians, cos, sqrt
from datetime import datetime
import hashlib
import os
import platform
import re
import subprocess
def checksum(file_path, blocksize=65536):
    """Create a SHA-256 hash value for the given file.

    See http://stackoverflow.com/a/3431835/1318758.

    :param str file_path: Path to the file to create a hash for.
    :param int blocksize: Read blocks of this size from the file when
     creating the hash.
    :returns: str hex digest
    """
    # BUGFIX: removed an unreachable 'return None' that followed the
    # return below.
    hasher = hashlib.sha256()
    with open(file_path, 'rb') as file:
        # Read in chunks so large files are not loaded entirely into memory
        buf = file.read(blocksize)
        while buf:
            hasher.update(buf)
            buf = file.read(blocksize)
    return hasher.hexdigest()
def distance_between_two_points(lat1, lon1, lat2, lon2):
    """Return the approximate distance in meters between two points,
    using an equirectangular projection.

    From http://stackoverflow.com/questions/15736995/how-can-i-quickly-estimate-the-distance-between-two-latitude-longitude-points # noqa
    """
    earth_radius = 6371000  # radius of the earth in m
    lat1, lon1, lat2, lon2 = (radians(value) for value in (lat1, lon1, lat2, lon2))
    dx = (lon2 - lon1) * cos(0.5 * (lat2 + lat1))
    dy = lat2 - lat1
    return earth_radius * sqrt(dx * dx + dy * dy)
def empty_dir(dir_path):
    """Return True if *dir_path* contains no entries.

    :param dir_path: path of the directory to inspect
    :returns: bool
    """
    # BUGFIX: use scandir as a context manager so the OS directory handle
    # is closed promptly (the original left it to GC, which triggers
    # ResourceWarning).
    with os.scandir(dir_path) as entries:
        return next(entries, None) is None
def filename_filter(filename):
    """
    Return a valid filename built from *filename* by stripping
    blacklisted characters (None passes through unchanged).
    """
    if filename is None:
        return filename
    # Remove every blacklisted char in a single translate() pass.
    return filename.translate(str.maketrans('', '', '/\\:*"<>|'))
def get_date_regex(user_regex=None):
    """Return the dict of compiled date-matching regexes.

    When *user_regex* is given, it is the only pattern (under key 'a');
    otherwise a set of built-in patterns is returned.
    """
    if user_regex:
        return {'a': re.compile(user_regex)}

    return {
        # regex to match date format type %Y%m%d, %y%m%d, %d%m%Y,
        # etc...
        'a': re.compile(
            r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_.]?(?P<hour>\d{2})[-_.]?(?P<minute>\d{2})[-_.]?(?P<second>\d{2})([-_./ ])'
        ),
        'b': re.compile(
            r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
        ),
        # not very accurate
        'c': re.compile(
            r'[-_./ ](?P<year>\d{2})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
        ),
        'd': re.compile(
            r'[-_./ ](?P<day>\d{2})[-_.](?P<month>\d{2})[-_.](?P<year>\d{4})([-_./ ])'
        ),
    }
# Module-level default regex table, built once so get_date_from_string()
# does not recompile the patterns on every call.
DATE_REGEX = get_date_regex()
def get_date_from_string(string):
    """Retrieve a date stamp embedded in *string* (e.g. a filename).

    Tries each pattern in DATE_REGEX in order and stops at the first hit.

    :returns: tuple (datetime or None, matched regex or None, separator str)
    """
    # If missing datetime from EXIF data check if filename is in datetime format.
    # For this use a user provided regex if possible.
    # Otherwise assume a filename such as IMG_20160915_123456.jpg as default.

    matches = []
    sep = ''
    for i, regex in DATE_REGEX.items():
        match = re.findall(regex, string)
        if match != []:
            # NOTE(review): index 3 is the trailing separator group for
            # patterns 'b'/'c'/'d', but pattern 'a' has 7 groups and its
            # index 3 is the hour -- confirm 'sep' is correct for 'a'.
            sep = match[0][3]
            if i == 'c':
                # Two-digit year: assume the 21st century
                match = [('20' + match[0][0], match[0][1], match[0][2])]
            elif i == 'd':
                # reorder items: pattern 'd' captures day, month, year
                match = [(match[0][2], match[0][1], match[0][0])]
            else:
                # Keep only (year, month, day); any time groups are dropped
                match = [(match[0][0], match[0][1], match[0][2])]

            if len(match) != 1:
                # The time string is not uniq
                continue

            matches.append((match[0], regex))
            # We want only the first match for the moment
            break

    # check if there is only one result
    if len(set(matches)) == 1:
        try:
            # Convert str to int
            date_object = tuple(map(int, matches[0][0]))
            date = datetime(*date_object)
        except (KeyError, ValueError):
            # Out-of-range values (e.g. month 13) yield no date, but the
            # matched regex and separator are still reported
            return None, matches[0][1], sep

        return date, matches[0][1], sep

    return None, None, sep
def match_date_regex(regex, value):
    """Normalize a leading date match in *value* to dash-separated form.

    Returns *value* unchanged when *regex* does not match at the start.
    """
    if re.match(regex, value) is None:
        return value
    return re.sub(regex, r'\g<1>-\g<2>-\g<3>-', value)
def split_part(dedup_regex, path_part, items=None):
    """
    Recursively split *path_part* using the regexes in *dedup_regex*.

    NOTE: consumes *dedup_regex* by popping from its end -- the caller's
    list is mutated, and each recursion level handles one regex.

    :param list dedup_regex: compiled regexes, consumed right-to-left
    :param str path_part: path fragment to split
    :param list items: accumulator used across recursive calls
    :returns: list of parts
    """
    if not items:
        items = []

    regex = dedup_regex.pop()
    parts = re.split(regex, path_part)
    # Loop thought part, search matched regex part and proceed with
    # next regex for others parts
    for n, part in enumerate(parts):
        if re.match(regex, part):
            if part[0] in '-_ .':
                if n > 0:
                    # move the separator to previous item
                    parts[n - 1] = parts[n - 1] + part[0]
                items.append(part[1:])
            else:
                items.append(part)
        elif dedup_regex:
            # Others parts: recurse with the remaining regexes
            items = split_part(dedup_regex, part, items)
        else:
            items.append(part)

    return items
# Conversion functions
# source:https://rodic.fr/blog/camelcase-and-snake_case-strings-conversion-with-python/
def snake2camel(name):
    """Convert snake_case to CamelCase (e.g. 'date_original' -> 'DateOriginal')."""
    def _upper_first(match):
        return match.group(1).upper()

    return re.sub(r'(?:^|_)([a-z])', _upper_first, name)
def camel2snake(name):
    """Convert CamelCase to snake_case (e.g. 'DateOriginal' -> 'date_original')."""
    head = name[0].lower()
    tail = name[1:]
    return head + re.sub(
        r'(?!^)[A-Z]', lambda m: '_' + m.group(0).lower(), tail
    )
def open_file(path):
    """Open *path* with the platform's default application."""
    system = platform.system()
    if system == "Windows":
        os.startfile(path)
        return
    # macOS uses 'open'; other POSIX systems rely on xdg-open
    opener = "open" if system == "Darwin" else "xdg-open"
    subprocess.Popen([opener, path])

View File

@ -1,23 +0,0 @@
{
"name": "elodie",
"version": "1.0.0",
"description": "GUI for Elodie",
"main": "app/index.js",
"dependencies": {
"menubar": "^2.3.0"
},
"devDependencies": {},
"scripts": {
"test": "electron app.js"
},
"repository": {
"type": "git",
"url": "https://github.com/jmathai/elodie"
},
"author": "Jaisen Mathai",
"license": "ISC",
"bugs": {
"url": "https://github.com/jmathai/elodie/issues"
},
"homepage": "https://github.com/jmathai/elodie"
}

6
pyproject.toml Normal file
View File

@ -0,0 +1,6 @@
[build-system]
requires = [
"setuptools>=42",
"wheel"
]
build-backend = "setuptools.build_meta"

7
pytest.ini Normal file
View File

@ -0,0 +1,7 @@
[pytest]
# addopts = --ignore=old_tests -s
# collect_ignore = ["old_test"]
[pycodestyle]
# ignore = old_test/* ALL

View File

@ -1,8 +1,9 @@
click==6.6
imagehash==4.2.1
requests==2.20.0
Send2Trash==1.3.0
configparser==3.5.0
tabulate==0.7.7
Pillow==8.0
six==1.9
click
python-dateutil
geopy
imagehash
inquirer
configparser
tabulate
Pillow
#xpyheif_pillow_opener

View File

@ -1,41 +0,0 @@
#!/usr/bin/env python
# Legacy elodie test runner (nose based); superseded by pytest in ordigi.
import nose
import os
import shutil
import sys
import tempfile

if __name__ == "__main__":
    # test_directory is what we pass nose.run for where to find tests
    test_directory = os.path.abspath('tests')
    # create a temporary directory to use for the application directory while running tests
    temporary_application_directory = tempfile.mkdtemp('-elodie-tests')
    os.environ['ELODIE_APPLICATION_DIRECTORY'] = temporary_application_directory
    # copy config.ini-sample over to the test application directory
    temporary_config_file_sample = '{}/config.ini-sample'.format(os.path.dirname(test_directory))
    temporary_config_file = '{}/config.ini'.format(temporary_application_directory)
    shutil.copy2(
        temporary_config_file_sample,
        temporary_config_file,
    )
    # read the sample config file and store contents to be replaced
    with open(temporary_config_file_sample, 'r') as f:
        config_contents = f.read()
    # set the mapquest key in the temporary config file and write it to the temporary application directory
    config_contents = config_contents.replace('your-api-key-goes-here', 'x8wQLqGhW7qK3sFpjYtVTogVtoMK0S8s')
    with open(temporary_config_file, 'w+') as f:
        f.write(config_contents)
    # forward CLI args to nose, with verbose output and stdout capture off
    test_argv = sys.argv
    test_argv.append('--verbosity=2')
    test_argv.append('-s')
    # nose.run returns True when all tests pass; map that to exit codes
    result = nose.run(argv=test_argv)
    if(result):
        sys.exit(0)
    else:
        sys.exit(1)

BIN
samples/images/DSC03584.dng Executable file

Binary file not shown.

BIN
samples/test_exif/.DS_Store vendored Normal file

Binary file not shown.

View File

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

View File

Before

Width:  |  Height:  |  Size: 2.9 KiB

After

Width:  |  Height:  |  Size: 2.9 KiB

View File

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

Before

Width:  |  Height:  |  Size: 222 B

After

Width:  |  Height:  |  Size: 222 B

View File

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

View File

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

View File

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

View File

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

Before

Width:  |  Height:  |  Size: 9.2 KiB

After

Width:  |  Height:  |  Size: 9.2 KiB

View File

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 16 KiB

View File

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

View File

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

BIN
samples/test_exif/photo.arw Normal file

Binary file not shown.

BIN
samples/test_exif/photo.dng Normal file

Binary file not shown.

BIN
samples/test_exif/photo.nef Normal file

Binary file not shown.

BIN
samples/test_exif/photo.rw2 Normal file

Binary file not shown.

39
setup.cfg Normal file
View File

@ -0,0 +1,39 @@
[metadata]
# For upload use:
# name = example-pkg-YOUR-USERNAME-HERE
name = ordigi-pkg-local
version = 0.1.0
author = Cedric Leporcq
author_email = cedl38@gmail.com
description = Media organizer tools
long_description = file: README.md
long_description_content_type = text/markdown
# url =
# project_urls =
# Bug Tracker =
classifiers =
Development Status :: 3 - Alpha
Environment :: Console
Programming Language :: Python :: 3
License :: OSI Approved :: GPL version 3
Operating System :: OS Independent
[options]
# package_dir = ordigi
packages = find:
python_requires = >=3.6
[options.packages.find]
exclude =
tests
docs
[options.entry_points]
console_scripts =
ordigi = ordigi.cli:cli
[flake8]
[pycodestyle]
max-line-length = 88

3
setup.py Normal file
View File

@ -0,0 +1,3 @@
from setuptools import setup
setup()

View File

@ -1,15 +1,18 @@
""" pytest test configuration """
from configparser import RawConfigParser
import pytest
from pathlib import Path
import os
from pathlib import Path, PurePath
import random
import shutil
import tempfile
from dozo import config
from dozo.exiftool import _ExifToolProc
import pytest
from ordigi.exiftool import _ExifToolProc
ORDIGI_PATH = Path(__file__).parent.parent
DOZO_PATH = Path(__file__).parent.parent
@pytest.fixture(autouse=True)
def reset_singletons():
@ -17,33 +20,57 @@ def reset_singletons():
_ExifToolProc.instance = None
def copy_sample_files():
src_path = tempfile.mkdtemp(prefix='dozo-src')
paths = Path(DOZO_PATH, 'samples/test_exif').glob('*')
@pytest.fixture(scope="module")
def sample_files_paths(tmpdir_factory):
tmp_path = Path(tmpdir_factory.mktemp("ordigi-src-"))
path = Path(ORDIGI_PATH, 'samples/test_exif')
shutil.copytree(path, tmp_path / path.name)
paths = Path(tmp_path).glob('**/*')
file_paths = [x for x in paths if x.is_file()]
for file_path in file_paths:
source_path = Path(src_path, file_path.name)
shutil.copyfile(file_path, source_path)
return src_path, file_paths
return tmp_path, file_paths
def randomize_files(dest_dir):
# Get files randomly
for path, subdirs, files in os.walk(dest_dir):
if '.ordigi' in path:
continue
for name in files:
file_path = PurePath(path, name)
if bool(random.getrandbits(1)):
with open(file_path, 'wb') as fout:
fout.write(os.urandom(random.randrange(128, 2048)))
if bool(random.getrandbits(1)):
dest_path = PurePath(path, file_path.stem + '_1'+ file_path.suffix)
shutil.copyfile(file_path, dest_path)
def randomize_db(dest_dir):
# alterate database
file_path = Path(str(dest_dir), '.ordigi', 'collection.db')
with open(file_path, 'wb') as fout:
fout.write(os.urandom(random.randrange(128, 2048)))
@pytest.fixture(scope="module")
def conf_path():
tmp_path = tempfile.mkdtemp(prefix='dozo-')
conf_dir = tempfile.mkdtemp(prefix='ordigi-')
conf = RawConfigParser()
conf['Path'] = {
'day_begins': '4',
'dirs_path':'%u{%Y-%m}/{city}|{city}-{%Y}/{folders[:1]}/{folder}',
'name':'{%Y-%m-%b-%H-%M-%S}-{basename}.%l{ext}'
'dirs_path':'%u<%Y-%m>/<city>|<city>-<%Y>/<folders[:1]>/<folder>',
'name':'<%Y-%m-%b-%H-%M-%S>-<basename>.%l<ext>'
}
conf['Geolocation'] = {
'geocoder': 'Nominatium'
}
conf_path = Path(tmp_path, "dozo.conf")
config.write(conf_path, conf)
conf_path = Path(conf_dir, "ordigi.conf")
with open(conf_path, 'w') as conf_file:
conf.write(conf_file)
yield conf_path
shutil.rmtree(tmp_path)
shutil.rmtree(conf_dir)

251
tests/test_cli.py Normal file
View File

@ -0,0 +1,251 @@
import shutil
from click.testing import CliRunner
from pathlib import Path
import pytest
import inquirer
from ordigi import cli
from ordigi.request import Input
CONTENT = "content"
ORDIGI_PATH = Path(__file__).parent.parent
def get_arg_options_list(arg_options):
arg_options_list = []
for opt, arg in arg_options:
arg_options_list.append(opt)
arg_options_list.append(arg)
return arg_options_list
class TestOrdigi:
    """End-to-end tests driving the ordigi CLI through click's CliRunner."""

    # NOTE(review): despite the name, this is an autouse fixture (runs before
    # every test), and its first parameter receives the test instance.
    @pytest.fixture(autouse=True)
    def setup_class(cls, sample_files_paths):
        cls.runner = CliRunner()
        # Sample collection directory and the media files inside it.
        cls.src_path, cls.file_paths = sample_files_paths
        cls.logger_options = ('--debug',)
        # (option, value) pairs shared by the filtering commands.
        cls.filter_options = (
            ('--ignore-tags', 'CreateDate'),
            ('--ext', 'jpg'),
            ('--glob', '*'),
        )
        cls.sort_options = (
            '--album-from-folder',
            '--fill-date-original',
            '--path-format',
            '--remove-duplicates',
            '--use-date-filename',
            '--use-file-dates',
        )

    def assert_cli(self, command, attributes, state=0):
        """Invoke *command* with *attributes* and assert exit code *state*."""
        result = self.runner.invoke(command, [*attributes])
        assert result.exit_code == state, (command, attributes)

    def assert_options(self, command, bool_options, arg_options, paths):
        """Run *command* once per option, each option tried individually."""
        for bool_option in bool_options:
            self.assert_cli(command, [bool_option, *paths])
        for opt, arg in arg_options:
            self.assert_cli(command, [opt, arg, *paths])

    def assert_all_options(self, command, bool_options, arg_options, paths):
        """Run *command* once with every option supplied at the same time."""
        arg_options_list = get_arg_options_list(arg_options)
        self.assert_cli(command, [
            *bool_options, *arg_options_list, *paths,
        ])

    def test_commands(self):
        # Check if fail if path not exist
        commands = [
            cli._check,
            cli._clean,
            cli._compare,
            cli._edit,
            cli._import,
            cli._init,
            cli._sort,
            cli._update,
        ]
        for command in commands:
            # 'edit' requires a --key argument before the path.
            if command.name == 'edit':
                self.assert_cli(command, ['-k', 'date_original', 'not_exist'], state=1)
            else:
                self.assert_cli(command, ['not_exist'], state=1)
        # 'clone' takes two paths; a single missing one is a usage error (2).
        self.assert_cli(cli._clone, ['not_exist'], state=2)

    def test_edit(self, monkeypatch):
        bool_options = (
            *self.logger_options,
        )
        arg_options = (
            *self.filter_options,
        )

        # Replace the interactive prompt so the test needs no user input.
        def mockreturn(self, message):
            return '03-12-2021 08:12:35'

        monkeypatch.setattr(Input, 'text', mockreturn)

        args = (
            '--key',
            'date_original',
            '--overwrite',
            str(self.src_path.joinpath('test_exif/photo.png')),
            str(self.src_path),
        )
        self.assert_cli(cli._edit, args)
        # self.assert_options(cli._edit, bool_options, arg_options, args)
        # self.assert_all_options(cli._edit, bool_options, arg_options, args)

    def test_sort(self):
        bool_options = (
            *self.logger_options,
            # '--interactive',
            '--dry-run',
            '--album-from-folder',
            '--remove-duplicates',
            '--use-date-filename',
            '--use-file-dates',
            '--clean',
        )
        arg_options = (
            *self.filter_options,
            ('--path-format', '{%Y}/{folder}/{name}.{ext}'),
        )
        paths = (str(self.src_path),)
        self.assert_cli(cli._sort, paths)
        self.assert_options(cli._sort, bool_options, arg_options, paths)
        self.assert_all_options(cli._sort, bool_options, arg_options, paths)

    def test_clone(self, tmp_path):
        paths = (str(self.src_path), str(tmp_path))
        # A collection must be initialized before it can be cloned.
        self.assert_cli(cli._init, [str(self.src_path)])
        self.assert_cli(cli._clone, ['--dry-run', *self.logger_options, *paths])
        self.assert_cli(cli._clone, paths)

    def assert_init(self):
        self.assert_cli(cli._init, [*self.logger_options, str(self.src_path)])

    def assert_update(self):
        # Add a new file so 'update' has something to register.
        file_path = Path(ORDIGI_PATH, 'samples/test_exif/photo.cr2')
        dest_path = self.src_path / 'photo_moved.cr2'
        shutil.copyfile(file_path, dest_path)
        self.assert_cli(cli._update, [*self.logger_options, str(self.src_path)])
        self.assert_cli(cli._update, ['--checksum', str(self.src_path)])

    def assert_check(self):
        self.assert_cli(cli._check, [*self.logger_options, str(self.src_path)])

    def assert_clean(self):
        bool_options = (
            *self.logger_options,
            # '--interactive',
            '--dry-run',
            '--delete-excluded',
            '--folders',
            '--path-string',
            '--remove-duplicates',
        )
        arg_options = (
            *self.filter_options,
            ('--dedup-regex', r'\d{4}-\d{2}'),
        )
        # First with a subdir argument, then on the whole collection.
        paths = ('test_exif', str(self.src_path))
        self.assert_cli(cli._clean, paths)
        paths = (str(self.src_path),)
        self.assert_cli(cli._clean, paths)
        self.assert_options(cli._clean, bool_options, arg_options, paths)
        self.assert_all_options(cli._clean, bool_options, arg_options, paths)

    def test_init_update_check_clean(self):
        # These steps share state, so they run in a fixed order.
        self.assert_init()
        self.assert_update()
        self.assert_clean()

    def test_import(self, tmp_path):
        bool_options = (
            *self.logger_options,
            # '--interactive',
            '--dry-run',
            '--album-from-folder',
            '--remove-duplicates',
            '--use-date-filename',
            '--use-file-dates',
            '--copy',
        )
        arg_options = (
            ('--exclude', '.DS_Store'),
            *self.filter_options,
            ('--path-format', '{%Y}/{folder}/{stem}.{ext}'),
        )
        paths = (str(self.src_path), str(tmp_path))
        result = self.runner.invoke(cli._import, ['--copy', *paths])
        assert result.exit_code == 0
        self.assert_options(cli._import, bool_options, arg_options, paths)
        self.assert_all_options(cli._import, bool_options, arg_options, paths)

    def test_compare(self):
        bool_options = (
            *self.logger_options,
            # '--interactive',
            '--dry-run',
            '--find-duplicates',
            '--remove-duplicates',
        )
        arg_options = (
            *self.filter_options,
            # ('--similar-to', ''),
            ('--similarity', '65'),
        )
        paths = (str(self.src_path),)
        # Workaround
        self.assert_cli(cli._update, paths)
        self.assert_cli(cli._compare, paths)
        self.assert_options(cli._compare, bool_options, arg_options, paths)

    def test_check(self):
        self.assert_check()
def test_needsfiles(tmpdir):
    """The tmpdir fixture must supply a truthy path object."""
    assert bool(tmpdir)
def test_create_file(tmp_path):
    """A file written under tmp_path round-trips and is the only entry."""
    subdir = tmp_path / "sub"
    subdir.mkdir()
    target = subdir / "hello.txt"
    target.write_text(CONTENT)
    assert target.read_text() == CONTENT
    assert len(list(tmp_path.iterdir())) == 1

298
tests/test_collection.py Normal file
View File

@ -0,0 +1,298 @@
from datetime import datetime
import shutil
import sqlite3
from pathlib import Path
import re
import pytest
import inquirer
from ordigi import LOG
from ordigi import constants
from ordigi import utils
from ordigi.summary import Summary
from ordigi.collection import Collection, FPath, Paths
from ordigi.exiftool import ExifTool, ExifToolCaching, exiftool_is_running, terminate_exiftool
from ordigi.geolocation import GeoLocation
from ordigi.media import Media, ReadExif
from ordigi.request import Input
from .conftest import randomize_files, randomize_db
LOG.setLevel(10)
class TestFPath:
    """Tests for FPath, which expands path-format masks into path parts."""

    # NOTE(review): autouse fixture despite the name; first parameter is the
    # test instance.
    @pytest.fixture(autouse=True)
    def setup_class(cls, sample_files_paths):
        cls.src_path, cls.file_paths = sample_files_paths
        cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME

    def test_get_part(self, tmp_path):
        """
        Test all parts
        """
        fpath = FPath(self.path_format, 4)
        # Item to search for:
        items = fpath.get_items()
        # One mask per supported placeholder syntax.
        masks = [
            '<album>',
            '<basename>',
            '<camera_make>',
            '<camera_model>',
            '<city>',
            '<"custom">',
            '<country>',
            '<ext>',
            '<folder>',
            '<folders[1:3]>',
            '<location>',
            '<name>',
            '<original_name>',
            '<state>',
            '<title>',
            '<%Y-%m-%d>',
            '<%Y-%m-%d_%H-%M-%S>',
            '<%Y-%m-%b>'
        ]
        for file_path in self.file_paths:
            media = Media(file_path, self.src_path, use_date_filename=True,
                          use_file_dates=True)
            subdirs = file_path.relative_to(self.src_path).parent
            # Map each metadata item to the EXIF tag names it may come from.
            exif_tags = {}
            for key in ('album', 'camera_make', 'camera_model', 'latitude',
                        'longitude', 'original_name', 'title'):
                exif_tags[key] = media.tags_keys[key]
            exif_data = ExifToolCaching(str(file_path)).asdict()
            loc = GeoLocation()
            media.get_metadata(self.src_path, loc)
            for item, regex in items.items():
                for mask in masks:
                    matched = re.search(regex, mask)
                    if matched:
                        # mask[1:-1] strips the surrounding '<' and '>'.
                        part = fpath.get_part(item, mask[1:-1], media.metadata)
                        # check if part is correct
                        assert isinstance(part, str), file_path
                        if item == 'basename':
                            assert part == file_path.stem, file_path
                        elif item == 'date':
                            if part == '':
                                media.get_date_media()
                            assert datetime.strptime(part, mask[1:-1])
                        elif item == 'folder':
                            assert part == subdirs.name, file_path
                        elif item == 'folders':
                            assert part in str(subdirs)
                        elif item == 'ext':
                            assert part == file_path.suffix[1:], file_path
                        elif item == 'name':
                            expected_part = file_path.stem
                            # NOTE(review): this overwrites 'part' from
                            # 'expected_part' on each pass instead of
                            # stripping dates cumulatively — looks like it
                            # should be `expected_part = re.sub(...)`; confirm.
                            for rx in utils.get_date_regex().values():
                                part = re.sub(rx, '', expected_part)
                            assert part == expected_part, file_path
                        elif item == 'custom':
                            # Custom text keeps only what is inside <"...">.
                            assert part == mask[2:-2], file_path
                        elif item in ('city', 'country', 'location', 'state'):
                            # Geolocation values depend on the geocoder; skip.
                            pass
                        elif item in exif_tags.keys():
                            f = False
                            for key in exif_tags[item]:
                                if key in exif_data:
                                    f = True
                                    assert part == exif_data[key], file_path
                                    break
                            if f == False:
                                assert part == '', file_path
                        else:
                            assert part == '', file_path

    def test_get_early_morning_photos_date(self):
        # 02:20 is before the configured day_begins hour (4), so the photo
        # counts as belonging to the previous day for date-only formats.
        date = datetime(2021, 10, 16, 2, 20, 40)
        fpath = FPath(self.path_format, 4)
        part = fpath.get_early_morning_photos_date(date, '%Y-%m-%d')
        assert part == '2021-10-15'
        # Formats that include the time keep the real timestamp.
        part = fpath.get_early_morning_photos_date(date, '%Y%m%d-%H%M%S')
        assert part == '20211016-022040'
class TestCollection:
    """Tests for Collection: sorting, importing, updating and editing media."""

    # NOTE(review): autouse fixture despite the name; first parameter is the
    # test instance.
    @pytest.fixture(autouse=True)
    def setup_class(cls, sample_files_paths):
        cls.src_path, cls.file_paths = sample_files_paths
        cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME

    def teardown_class(self):
        # Make sure no exiftool daemon process is left behind.
        terminate_exiftool()
        assert not exiftool_is_running()

    def assert_import(self, summary, nb):
        # Summary is created and there is no errors
        assert summary.errors == 0
        assert summary.success_table.sum('import') == nb

    def assert_sort(self, summary, nb):
        # Summary is created and there is no errors
        assert summary.errors == 0
        assert summary.success_table.sum('sort') == nb

    def test_sort_files(self, tmp_path):
        cli_options = {
            'album_from_folder': True, 'cache': False, 'path_format': self.path_format
        }
        collection = Collection(tmp_path, cli_options=cli_options)
        loc = GeoLocation()
        summary = collection.sort_files([self.src_path], loc, imp='copy')
        # 29 sample files are expected to be imported.
        self.assert_import(summary, 29)
        summary = collection.check_files()
        assert summary.success_table.sum('import') == 29
        assert summary.success_table.sum('update') == 0
        assert not summary.errors
        # check if album value are set
        filters = {
            'exclude': None,
            'extensions': None,
            'glob': '**/*',
            'max_deep': None,
        }
        paths = Paths(filters).get_files(tmp_path)
        for file_path in paths:
            if '.db' not in str(file_path):
                for value in ReadExif(file_path).get_key_values('album'):
                    # NOTE(review): `value != '' or None` only checks the
                    # first operand; the `or None` part is inert — confirm
                    # intent.
                    assert value != '' or None
        collection = Collection(tmp_path, cli_options=cli_options)
        # Try to change path format and sort files again
        # NOTE(review): path_format is assigned but never passed to the
        # collection — confirm whether it should feed cli_options.
        path_format = 'test_exif/<city>/<%Y>-<name>.%l<ext>'
        summary = collection.sort_files([tmp_path], loc)
        self.assert_sort(summary, 23)
        # Duplicate a directory so 'update' has new files to register.
        shutil.copytree(tmp_path / 'test_exif', tmp_path / 'test_exif_copy')
        collection.summary = Summary(tmp_path)
        assert collection.summary.success_table.sum() == 0
        summary = collection.update(loc)
        assert summary.success_table.sum('update') == 2
        assert summary.success_table.sum() == 2
        assert not summary.errors
        # A second update with nothing changed must be a no-op.
        collection.summary = Summary(tmp_path)
        summary = collection.update(loc)
        assert summary.success_table.sum() == 0
        assert not summary.errors
        # test with populated dest dir
        randomize_files(tmp_path)
        summary = collection.check_files()
        assert summary.errors
        # test summary update
        collection.summary = Summary(tmp_path)
        summary = collection.update(loc)
        assert summary.success_table.sum('sort') == 0
        assert summary.success_table.sum('update')
        assert not summary.errors

    def test_sort_files_invalid_db(self, tmp_path):
        """A corrupted collection.db must surface as a DatabaseError."""
        collection = Collection(tmp_path, {'path_format': self.path_format})
        loc = GeoLocation()
        randomize_db(tmp_path)
        with pytest.raises(sqlite3.DatabaseError) as e:
            summary = collection.sort_files([self.src_path], loc, imp='copy')

    def test_sort_file(self, tmp_path):
        # Exercise all three import modes against the same source photo.
        for imp in ('copy', 'move', False):
            collection = Collection(tmp_path)
            # copy mode
            src_path = Path(self.src_path, 'test_exif', 'photo.png')
            media = Media(src_path, self.src_path)
            media.get_metadata(tmp_path)
            name = 'photo_' + str(imp) + '.png'
            media.metadata['file_path'] = name
            dest_path = Path(tmp_path, name)
            src_checksum = utils.checksum(src_path)
            summary = collection.sort_file(
                src_path, dest_path, media.metadata, imp=imp
            )
            assert not summary.errors
            # Ensure files remain the same
            if not imp:
                assert collection._checkcomp(dest_path, src_checksum)
            if imp == 'copy':
                assert src_path.exists()
            else:
                assert not src_path.exists()
                # Restore the moved file for the next loop iteration.
                shutil.copyfile(dest_path, src_path)

    def test_get_files(self):
        filters = {
            'exclude': {'**/*.dng',},
            'extensions': None,
            'glob': '**/*',
            'max_deep': 1,
        }
        paths = Paths(filters)
        paths = list(paths.get_files(self.src_path))
        assert len(paths) == 9
        # The excluded .dng file must not be listed.
        assert Path(self.src_path, 'test_exif/photo.dng') not in paths
        for path in paths:
            assert isinstance(path, Path)

    def test_sort_similar_images(self, tmp_path):
        path = tmp_path / 'collection'
        shutil.copytree(self.src_path, path)
        collection = Collection(path)
        loc = GeoLocation()
        summary = collection.init(loc)
        summary = collection.sort_similar_images(path, similarity=60)
        # Summary is created and there is no errors
        assert not summary.errors

    def test_edit_date_metadata(self, tmp_path, monkeypatch):
        path = tmp_path / 'collection'
        shutil.copytree(self.src_path, path)
        collection = Collection(path, {'cache': False})

        # Replace the interactive prompt with a fixed date answer.
        def mockreturn(self, message):
            return '03-12-2021 08:12:35'

        monkeypatch.setattr(Input, 'text', mockreturn)
        collection.edit_metadata({path}, {'date_original'}, overwrite=True)
        # check if db value is set
        file_path = 'test_exif/photo.rw2'
        date = collection.db.sqlite.get_metadata(file_path, 'DateOriginal')
        assert date == '2021-03-12 08:12:35'
        # Check if exif value is set
        path_file = path.joinpath(file_path)
        date = ExifTool(path_file).asdict()['EXIF:DateTimeOriginal']
        assert date == '2021-03-12 08:12:35'

    def test_edit_location_metadata(self, tmp_path, monkeypatch):
        path = tmp_path / 'collection'
        shutil.copytree(self.src_path, path)
        collection = Collection(path, {'cache': False})
        loc = GeoLocation()

        # Replace the interactive prompt with a fixed location answer.
        def mockreturn(self, message):
            return 'lyon'

        monkeypatch.setattr(Input, 'text', mockreturn)
        collection.edit_metadata({path}, {'location'}, loc, True)
        # check if db value is set
        file_path = 'test_exif/photo.rw2'
        location_id = collection.db.sqlite.get_metadata(file_path, 'LocationId')
        location = collection.db.sqlite.get_location_data(location_id, 'Location')
        # NOTE(review): the part after the comma is the assert *message*, so
        # `location == 'Lyon'` is never actually checked — likely two
        # separate asserts were intended.
        assert location_id, location == 'Lyon'
        # Check if exif value is set
        path_file = path.joinpath(file_path)
        latitude = ExifTool(path_file).asdict()['EXIF:GPSLatitude']
        longitude = ExifTool(path_file).asdict()['EXIF:GPSLongitude']
        assert latitude == 45.7578136999889
        assert longitude == 4.83201140001667

View File

@ -4,7 +4,7 @@ import shutil
import tempfile
from unittest import mock
from dozo import config
from ordigi.config import Config
# Helpers
import random
@ -21,7 +21,8 @@ class TestConfig:
@pytest.fixture(scope="module")
def conf(self, conf_path):
    """Return the parsed configuration loaded from the sample conf file.

    The flattened diff had kept the removed dozo-era line
    ``return config.load_config(conf_path)`` ahead of the new Config-based
    code, so the new code was unreachable; only the new state is kept.
    """
    config = Config(conf_path)
    return config.load_config()
def test_write(self, conf_path):
    # The conf_path fixture must have produced a real file on disk.
    assert conf_path.is_file()
@ -31,27 +32,34 @@ class TestConfig:
Read files from config and return variables
"""
# test valid config file
assert conf['Path']['dirs_path'] == '%u{%Y-%m}/{city}|{city}-{%Y}/{folders[:1]}/{folder}'
assert conf['Path']['name'] == '{%Y-%m-%b-%H-%M-%S}-{basename}.%l{ext}'
assert conf['Path']['dirs_path'] == '%u<%Y-%m>/<city>|<city>-<%Y>/<folders[:1]>/<folder>'
assert conf['Path']['name'] == '<%Y-%m-%b-%H-%M-%S>-<basename>.%l<ext>'
assert conf['Path']['day_begins'] == '4'
assert conf['Geolocation']['geocoder'] == 'Nominatium'
def test_load_config_no_exist(self):
    """Loading a missing config file must yield an empty mapping.

    The flattened diff had left the removed dozo-era lines
    (``conf = config.load_config('filename')`` / ``assert conf == {}``)
    merged with the new Config-based code; only the new state is kept.
    """
    # test file not exist
    config = Config()
    config.conf_path = Path('filename')
    assert config.load_config() == {}
def test_load_config_invalid(self, conf_path):
    """A malformed config file must raise MissingSectionHeaderError.

    The flattened diff had left the removed call
    ``config.load_config(conf_path)`` inside the ``pytest.raises`` block
    alongside the new ``Config(conf_path)`` line; only the new state is kept.
    """
    # test invalid config
    write_random_file(conf_path)
    with pytest.raises(Exception) as e:
        config = Config(conf_path)
    assert e.typename == 'MissingSectionHeaderError'
# NOTE(review): per the diff, this test was disabled when the module-level
# dozo `config.get_path_definition(conf)` API was replaced by the Config
# class; the flattened view had kept the old active method alongside the
# new commented-out version.  Only the new (disabled) state is kept.
# def test_get_path_definition(self, conf):
#     """
#     Get path definition from config
#     """
#     config = Config(conf=conf)
#     path = config.get_path_definition()
#     assert path == '%u<%Y-%m>/<city>|<city>-<%Y>/<folders[:1]>/<folder>/<%Y-%m-%b-%H-%M-%S>-<basename>.%l<ext>'
def test_get_config_options(self, conf):
    """get_config_options must return the parsed options as a dict."""
    cfg = Config(conf=conf)
    opts = cfg.get_config_options()
    assert isinstance(opts, dict)
    # assert isinstance(opts['Path'], dict)

130
tests/test_database.py Normal file
View File

@ -0,0 +1,130 @@
from datetime import datetime
from pathlib import Path
import pytest
import shutil
import sqlite3
from ordigi.database import Sqlite
class TestSqlite:
    """Tests for the Sqlite wrapper: schema, upserts and lookups."""

    # NOTE(review): autouse fixture despite the name; first parameter is the
    # test instance.  Seeds one metadata row and one location row.
    @pytest.fixture(autouse=True)
    def setup_class(cls, tmp_path):
        cls.test='abs'
        cls.sqlite = Sqlite(tmp_path)
        # One sample row per table; most columns just hold marker strings.
        row_data = {
            'FilePath': 'file_path',
            'Checksum': 'checksum',
            'Album': 'album',
            'Title': 'title',
            'LocationId': 2,
            'DateMedia': datetime(2012, 3, 27),
            'DateOriginal': datetime(2013, 3, 27),
            'DateCreated': 'date_created',
            'DateModified': 'date_modified',
            'FileModifyDate': 'file_modify_date',
            'CameraMake': 'camera_make',
            'CameraModel': 'camera_model',
            'OriginalName':'original_name',
            'SrcDir': 'src_dir',
            'Subdirs': 'subdirs',
            'Filename': 'filename'
        }
        location_data = {
            'Latitude': 24.2,
            'Longitude': 7.3,
            'LatitudeRef': 'latitude_ref',
            'LongitudeRef': 'longitude_ref',
            'City': 'city',
            'State': 'state',
            'Country': 'country',
            'Location': 'location'
        }
        cls.sqlite.upsert_metadata(row_data)
        cls.sqlite.upsert_location(location_data)
        # cls.sqlite.add_metadata_data('filename', 'ksinslsdosic', 'original_name', 'date_original', 'album', 1)
        # cls.sqlite.add_location(24.2, 7.3, 'city', 'state', 'country', 'location')
        yield
        shutil.rmtree(tmp_path)

    def test_init(self):
        # The wrapper must expose a path, a connection and a cursor.
        assert isinstance(self.sqlite.filename, Path)
        assert isinstance(self.sqlite.con, sqlite3.Connection)
        assert isinstance(self.sqlite.cur, sqlite3.Cursor)

    def test_create_table(self):
        assert self.sqlite.is_table('metadata')
        assert self.sqlite.is_table('location')

    def test_add_metadata_data(self):
        # The seeded row must round-trip; datetimes become ISO strings.
        result = tuple(self.sqlite.cur.execute("""select * from metadata where
                rowid=1""").fetchone())
        assert result == (
            1,
            'file_path',
            'checksum',
            'album',
            'title',
            2,
            '2012-03-27 00:00:00',
            '2013-03-27 00:00:00',
            'date_created',
            'date_modified',
            'file_modify_date',
            'camera_make',
            'camera_model',
            'original_name',
            'src_dir',
            'subdirs',
            'filename'
        )

    def test_get_checksum(self):
        assert not self.sqlite.get_checksum('invalid')
        assert self.sqlite.get_checksum('file_path') == 'checksum'

    def test_get_metadata(self):
        assert not self.sqlite.get_metadata('invalid', 'DateOriginal')
        assert self.sqlite.get_metadata('file_path', 'Album') == 'album'

    def test_add_location(self):
        result = tuple(self.sqlite.cur.execute("""select * from location where
                rowid=1""").fetchone())
        assert result == (
            1,
            24.2,
            7.3,
            'latitude_ref',
            'longitude_ref',
            'city',
            'state',
            'country',
            'location',
        )

    @pytest.mark.skip('TODO')
    def test_get_location_data(self, LocationId, data):
        pass

    @pytest.mark.skip('TODO')
    def test_get_location(self, Latitude, Longitude, column):
        pass

    def test_get_location_nearby(self):
        # Coordinates close to the seeded (24.2, 7.3) must match the row.
        value = self.sqlite.get_location_nearby(24.2005, 7.3004, 'Location')
        assert value == 'location'

    @pytest.mark.skip('TODO')
    def test_delete_row(self, table, id):
        pass

    @pytest.mark.skip('TODO')
    def test_delete_all_rows(self, table):
        pass

View File

@ -1,19 +0,0 @@
import pytest
CONTENT = "content"
class TestDozo:
    """Legacy dozo-era placeholder suite; its only test is skipped."""

    @pytest.mark.skip()
    def test__sort(self):
        assert 0
def test_needsfiles(tmpdir):
    """The tmpdir fixture must supply a truthy path object."""
    assert bool(tmpdir)
def test_create_file(tmp_path):
    """Writing CONTENT into a new file round-trips and is the sole entry."""
    base = tmp_path / "sub"
    base.mkdir()
    target = base / "hello.txt"
    target.write_text(CONTENT)
    assert target.read_text() == CONTENT
    assert len(list(tmp_path.iterdir())) == 1

Some files were not shown because too many files have changed in this diff Show More