Compare commits

..

10 Commits

92 changed files with 11898 additions and 5242 deletions

.gitignore (vendored, 152 lines changed)

@@ -1,141 +1,13 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Compiled python modules.
*.pyc
# Other
/diagnostics.lua
docs/Ordigi_data_scheme.odg
/ressources
/Session.vim
**/.DS_Store
**/*.pyc
**/config.ini
**/node_modules/**
dist/**
docs/_build
build/**
**/*.arw
**/*.dng
**/*.nef
**/*.rw2
env/**
/tags
/workflow.sh

Dockerfile (new file, 37 lines)

@@ -0,0 +1,37 @@
FROM debian:jessie
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update -y && \
apt-get install -y --no-install-recommends ca-certificates libimage-exiftool-perl python2.7 python-pip python-pyexiv2 wget make && \
pip install --upgrade pip setuptools && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/*
RUN apt-get update -qq && \
apt-get install -y locales -qq && \
locale-gen en_US.UTF-8 en_us && \
dpkg-reconfigure locales && \
locale-gen C.UTF-8 && \
/usr/sbin/update-locale LANG=C.UTF-8
ENV LANG C.UTF-8
ENV LANGUAGE C.UTF-8
ENV LC_ALL C.UTF-8
RUN wget http://www.sno.phy.queensu.ca/~phil/exiftool/Image-ExifTool-10.20.tar.gz && \
gzip -dc Image-ExifTool-10.20.tar.gz | tar -xf - && \
cd Image-ExifTool-10.20 && perl Makefile.PL && \
make install && cd ../ && rm -r Image-ExifTool-10.20
COPY requirements.txt /opt/elodie/requirements.txt
COPY docs/requirements.txt /opt/elodie/docs/requirements.txt
COPY elodie/tests/requirements.txt /opt/elodie/elodie/tests/requirements.txt
WORKDIR /opt/elodie
RUN pip install -r docs/requirements.txt && \
pip install -r elodie/tests/requirements.txt && \
rm -rf /root/.cache/pip
COPY . /opt/elodie
CMD ["/bin/bash"]

LICENSE (709 lines)

File diff suppressed because it is too large.

README.md (deleted, 167 lines)

@@ -1,167 +0,0 @@
# Ordigi
## Description
This tool organizes media files according to a given pattern. It is based on
EXIF metadata and uses an SQLite database.
Goals:
- Organize your existing collection of photos or other media types into a customizable folder structure.
- Record metadata and other file data to an SQLite database
- Ability to edit metadata
## Install
Ordigi relies on the great [ExifTool library by Phil Harvey](http://www.sno.phy.queensu.ca/~phil/exiftool/). Make sure it is installed.
Clone this repository and install ordigi:
```
pip install .
```
## Usage Instructions
### Client interface
You can invoke several commands from the client interface.
Use `ordigi --help` and `ordigi [command] --help` for usage
instructions. Each command offers several options.
#### Import photos to collection
The default behavior is to move files from one or several source directories
to your destination directory. If you want to copy them instead, use the `-c` or
`--copy` flag.
```
ordigi import -c /source1 /source2 /collection
```
#### Sort photos into collection
The `sort` command is essentially the same as `import`, but restricted to files already in the
collection.
```
ordigi sort /subdir1 /subdir2 /collection
```
#### Compare images into collection
Sort files by similarity:
```
ordigi compare /subdir1 /subdir2 /collection
```
Undo the compare sort:
```
ordigi compare --revert-compare /subdir1 /subdir2 /collection
```
#### Verify collection against bit rot / data rot
```
ordigi check
```
### Edit metadata and reorganize by changing location and dates (WIP)
```
ordigi edit --location="Las Vegas, NV" --sort
ordigi edit --time="2015-04-15" --sort
```
### Configuration
#### Config file
The sample configuration file `ordigi.conf` can be copied to `~/.config/ordigi/ordigi.conf` (the default location).
Numerous options, such as the folder structure and exclusions, can
be configured in this file.
#### Folder structure and name
The folder structure and file names can be customized via placeholders: *f-string-like* bracketed
keywords. Each keyword can be freely combined in any part of the path
pattern.
Default folder structure:
```
dirs_path=<%Y>/<%m-%b>-<city>-<folder>
name=<%Y%m%d-%H%M%S>-<%u<original_name>|%u<basename>>.%l<ext>
```
Example folder structure:
```
├── 2015
│   ├── 06-Jun-California
│   │   ├── 20150629_163414-img_3900.jpg
│   │   └── 20150629_170706-img_3901.jpg
│   ├── Paris
│   │   └── 20150630_024043-img_3903.jpg
│   ├── 07-Jul-Mountain View
│   │   ├── 20150719_171637-img_9426.jpg
│   │   └── 20150724_190633-img_9432.jpg
│   └── 09-Sep
│       ├── 20150927_014138-_dsc8705.dng
│       └── 20150927_014138-_dsc8705.nef
```
The folder structure uses the standard Unix path separator (`/`). A fallback folder part can optionally be specified using a pipe separator inside the brackets (`<.*|.*>`).
Valid keywords are:
- Date strings like the *%Y%m%d* pattern. For details of the supported formats see [strftime.org](https://strftime.org/).
- Geolocation info from OpenStreetMap: *country, city, location, state*
- Folder structure of source subdirectories via the *folder* or *folders[1:]* patterns,
similar to Python list slicing syntax.
- File data: *basename, ext, name, original_name*
- Exif metadata info: *album, camera_make, camera_model, title*.
- A custom string, using the *custom* keyword.
- Special modifiers *%u*/*%l* for upper/lower case respectively.
The default file path structure would look like `2015/07-Jul-Mountain_View/20150712-142231-original_name.jpg`.
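To make the bracket syntax concrete, here is a minimal illustrative sketch of how `<keyword|fallback>` patterns could be expanded. This is not Ordigi's actual implementation, and the sample values are hypothetical:
```
import re

def expand(pattern, values):
    """Expand <keyword|fallback> parts of a path pattern, trying each
    keyword across the pipe until one has a value."""
    def repl(match):
        for key in match.group(1).split('|'):
            value = values.get(key)
            if value:
                return value
        return ''
    return re.sub(r'<([^<>|]+(?:\|[^<>|]+)*)>', repl, pattern)

# Hypothetical values extracted from one file's metadata.
values = {'%Y': '2015', '%m-%b': '07-Jul', 'city': 'Mountain View'}
print(expand('<%Y>/<%m-%b>-<city|folder>', values))
# -> 2015/07-Jul-Mountain View
```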
## Retrieving data from media
### EXIF and XMP tags
Ordigi uses embedded EXIF metadata to organize media files and stores file data in an SQLite database.
| Data type | Tags | Notes |
|---|---|---|
| Date Original | EXIF:DateTimeOriginal, H264:DateTimeOriginal, EXIF:ModifyDate, file created, file modified | |
| Date Created | EXIF:CreateDate, QuickTime:CreationDate, QuickTime:CreateDate, QuickTime:CreationDate-und-US, QuickTime:MediaCreateDate | |
| Date Modified | File:FileModifyDate, QuickTime:ModifyDate | |
| Location | EXIF:GPSLatitude/EXIF:GPSLatitudeRef, EXIF:GPSLongitude/EXIF:GPSLongitudeRef, XMP:GPSLatitude, Composite:GPSLatitude, XMP:GPSLongitude, Composite:GPSLongitude | Composite tags are read-only |
| Title | XMP:Title, XMP:DisplayName | |
| Album | XMP-xmpDM:Album, XMP:Album | XMP:Album is user defined in `configs/ExifTool_config` for backwards compatibility |
| Camera Make / Model | EXIF:Make, QuickTime:Make, EXIF:Model, QuickTime:Model | |
For example, the media date can be retrieved, in order of preference, from
*Date Original* then *Date Created*. Optionally, *Date Modified* and even a filename *date string* can be used, depending on the options passed (see `ordigi sort --help`).
### Geolocation info
Ordigi uses the *location* EXIF metadata and the *Nominatim* geocoder to retrieve geolocation info from OpenStreetMap.
## Credits
The code is based on the [Elodie](https://github.com/jmathai/elodie) media organizer and takes inspiration from [SortPhotos](https://github.com/andrewning/sortphotos/blob/master/src/sortphotos.py) and [OSXPhotos](https://github.com/RhetTbull/osxphotos) for the ExifTool module.

Readme.md (new file, 402 lines)

@@ -0,0 +1,402 @@
# Hello, I'm Elodie
~~ *Your Personal EXIF-based Photo, Video and Audio Assistant* ~~
[![Build Status](https://travis-ci.org/jmathai/elodie.svg?branch=master)](https://travis-ci.org/jmathai/elodie) [![Coverage Status](https://coveralls.io/repos/github/jmathai/elodie/badge.svg?branch=master)](https://coveralls.io/github/jmathai/elodie?branch=master) [![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/jmathai/elodie/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/jmathai/elodie/?branch=master)
I made an easy to use app from this open source project. [Check out Elodie, the EXIF-based photo organizer app](https://getelodie.com).
<p align="center"><img src ="https://jmathai.s3.amazonaws.com/github/elodie/elodie-folder-anim.gif" /></p>
## Quickstart guide
Getting started takes just a few minutes.
### Install ExifTool
Elodie relies on the great [ExifTool library by Phil Harvey](http://www.sno.phy.queensu.ca/~phil/exiftool/). You'll need to install it for your platform.
Some features for video files will only work with newer versions of ExifTool and have been tested on version 10.20 or higher. Support for HEIC files requires version 11.50 or higher. Check your version by typing `exiftool -ver` and see the [manual installation instructions for ExifTool](http://www.sno.phy.queensu.ca/~phil/exiftool/install.html#Unix) if needed.
```
# OSX (uses homebrew, http://brew.sh/)
brew install exiftool
# Debian / Ubuntu
apt-get install libimage-exiftool-perl
# Fedora / Redhat
dnf install perl-Image-ExifTool
# Windows users can install the binary
# http://www.sno.phy.queensu.ca/~phil/exiftool/install.html
```
### Clone the Elodie repository
You can clone Elodie from GitHub. You'll need `git` installed ([instructions](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)).
```
git clone https://github.com/jmathai/elodie.git
cd elodie
pip install -r requirements.txt
```
### Give Elodie a test drive
Now that you've got the minimum dependencies installed you can start using Elodie. You'll need a photo, video or audio file and a folder you'd like Elodie to organize them into.
```
# Run these commands from the root of the repository you just cloned.
./elodie.py import --debug --destination="/where/i/want/my/photos/to/go" /where/my/photo/is.jpg
```
You'll notice that the photo was organized into an *Unknown Location* folder. That's because you haven't set up your MapQuest API ([instructions](#using-openstreetmap-data-from-mapquest)).
Now you're ready to learn more about Elodie.
<p align="center"><img src ="creative/logo@300x.png" /></p>
## Slowstart guide
[Read a 3 part blog post on why I was created](https://medium.com/vantage/understanding-my-need-for-an-automated-photo-workflow-a2ff95b46f8f#.dmwyjlc57) and how [I can be used with Google Photos](https://medium.com/@jmathai/my-automated-photo-workflow-using-google-photos-and-elodie-afb753b8c724).
I work tirelessly to make sure your photos are always sorted and organized so you can focus on more important things. By photos I mean JPEG, DNG, NEF and common video and audio files.
You don't love me yet but you will.
I only do 3 things.
* Firstly I organize your existing collection of photos into a customizable folder structure.
* Second I help make it easy for all the photos you haven't taken yet to flow into the exact location they belong.
* Third but not least I promise to do all this without a yucky proprietary database that some friends of mine use.
*NOTE: make sure you've installed everything I need before running the commands below. [Instructions](#quickstart-guide) at the top of this page.*
## Let's organize your existing photos
My guess is you've got quite a few photos scattered around. The first thing I'll help you do is to get those photos organized. It doesn't matter if you have hundreds, thousands or tens of thousands of photos; the more the merrier.
Fire up your terminal and run this command which *__copies__* your photos into something a bit more structured.
```
./elodie.py import --destination="/where/i/want/my/photos/to/go" /where/my/photos/are
```
I'm pretty fast but depending on how many photos you have you might want to grab a snack. When you run this command I'll `print` out my work as I go along. If you're bored you can open `/where/i/want/my/photos/to/go` in *Finder* and watch as I effortlessly copy your photos there.
You'll notice that your photos are now organized by date and location. Some photos do not have proper dates or location information in them. I do my best and in the worst-case scenario I'll use the earlier of the file's access and modified times. Ideally your photos have dates and location in the EXIF so my work is more accurate.
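Here's a minimal sketch of that worst-case fallback, using nothing beyond the standard library; it sketches the behavior described above, not Elodie's actual code:
```
import os
from datetime import datetime

def fallback_date(path):
    """Worst case: use the earlier of the file's access and modified times."""
    stat = os.stat(path)
    return datetime.fromtimestamp(min(stat.st_atime, stat.st_mtime))

print(fallback_date('/where/my/photo/is.jpg'))  # placeholder path
```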
Don't fret if your photos don't have much EXIF information. I'll show you how you can fix them up later on but let's walk before we run.
Back to your photos. When I'm done you should see something like this. Notice that I've renamed your files by adding the date and time they were taken. This helps keep them in chronological order when using most viewing applications. You'll thank me later.
```
├── 2015-06-Jun
│   ├── California
│   │   ├── 2015-06-29_16-34-14-img_3900.jpg
│   │   └── 2015-06-29_17-07-06-img_3901.jpg
│   └── Paris
│       └── 2015-06-30_02-40-43-img_3903.jpg
├── 2015-07-Jul
│   ├── Mountain View
│   │   ├── 2015-07-19_17-16-37-img_9426.jpg
│   │   └── 2015-07-24_19-06-33-img_9432.jpg
└── 2015-09-Sep
    └── Unknown Location
        ├── 2015-09-27_01-41-38-_dsc8705.dng
        └── 2015-09-27_01-41-38-_dsc8705.nef
```
Not too bad, eh? Wait a second, what's *Unknown Location*? If I'm not able to figure out where a photo was taken I'll place it into a folder named *Unknown Location*. This typically happens when photos do not have GPS information in their EXIF. You shouldn't see this for photos taken on a smartphone but it's often the case with digital cameras and SLRs. I can help you add GPS information to those photos and get them organized better. Let me show you how.
### Usage Instructions
You can view these instructions on the command line by typing `./elodie.py import --help`, `./elodie.py update --help` or `./elodie.py generate-db --help`.
#### Import photos
```
Usage: elodie.py import [OPTIONS] [PATHS]...
Import files or directories by reading their EXIF and organizing them
accordingly.
Options:
--destination DIRECTORY Copy imported files into this directory.
[required]
--source DIRECTORY Import files from this directory, if specified.
--file PATH Import this file, if specified.
--album-from-folder Use images' folders as their album names.
--trash After copying files, move the old files to the
trash.
--allow-duplicates Import the file even if it's already been imported.
--debug Override the value in constants.py with True.
--exclude-regex TEXT Regular expression for directories or files to
exclude.
--help Show this message and exit.
```
#### Update photos
```
Usage: elodie.py update [OPTIONS] FILES...
Update a file's EXIF. Automatically modifies the file's location and file
name accordingly.
Options:
--album TEXT Update the image album.
--location TEXT Update the image location. Location should be the name of a
place, like "Las Vegas, NV".
--time TEXT Update the image time. Time should be in YYYY-mm-dd
hh:ii:ss or YYYY-mm-dd format.
--title TEXT Update the image title.
--help Show this message and exit.
```
#### (Re)Generate checksum database
```
Usage: elodie.py generate-db [OPTIONS]
Regenerate the hash.json database which contains all of the sha256
signatures of media files. The hash.json file is located at ~/.elodie/.
Options:
--source DIRECTORY Source of your photo library. [required]
--help Show this message and exit.
```
#### Verify library against bit rot / data rot
```
Usage: elodie.py verify
```
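To illustrate what the checksum database buys you, here is a rough sketch of generating and verifying sha256 signatures. The flat path-to-digest layout is an assumption for illustration; Elodie's actual hash.json schema may differ:
```
import hashlib, json, os

def sha256_of(path, block_size=65536):
    """Stream the file so large media doesn't have to fit in memory."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            digest.update(block)
    return digest.hexdigest()

def generate_db(library, db_path):
    """Record one signature per file in the library."""
    db = {}
    for root, _, files in os.walk(library):
        for name in files:
            path = os.path.join(root, name)
            db[path] = sha256_of(path)
    with open(db_path, 'w') as f:
        json.dump(db, f, indent=2)

def verify(db_path):
    """Flag files whose current signature no longer matches (bit rot, edits)."""
    with open(db_path) as f:
        db = json.load(f)
    for path, expected in db.items():
        ok = os.path.exists(path) and sha256_of(path) == expected
        print(('OK      ' if ok else 'CHANGED ') + path)
```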
### Excluding folders and files from being imported
If you have specific folders or files which you would like to prevent from being imported you can provide regular expressions which will be used to match and skip files from being imported.
You can specify an exclusion at run time by using the `--exclude-regex` argument of the `import` command. You can pass multiple `--exclude-regex` arguments and all folder/file paths which match will be (silently) skipped.
If there are certain file or folder paths you *never* want to import then you can also add an `[Exclusions]` section to your `config.ini` file. Similar to the command line argument you can provide multiple exclusions. Here is an example.
```
[Exclusions]
synology_folders=@eaDir
thumbnails=.thumbnails
```
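Conceptually the matching is simple. A minimal sketch, assuming search-anywhere semantics for the patterns and hypothetical paths:
```
import re

# Exclusion patterns equivalent to the [Exclusions] section above.
EXCLUDES = [re.compile(p) for p in ('@eaDir', r'\.thumbnails')]

def is_excluded(path):
    """A path is skipped if any exclusion regex matches anywhere in it."""
    return any(p.search(path) for p in EXCLUDES)

print(is_excluded('/photos/@eaDir/thumb.jpg'))   # True: silently skipped
print(is_excluded('/photos/2015/img_3900.jpg'))  # False: imported
```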
### Create your own folder structure
OK, so what if you don't like the folders being named `2015-07-Jul/Mountain View`? No problem!
You can add a custom folder structure by editing your `config.ini` file (which should be placed under `~/.elodie/config.ini`). If you'd like to use a different folder for your configuration file then set an environment variable named `ELODIE_APPLICATION_DIRECTORY` with the fully qualified directory path.
#### Custom folder examples
Sometimes examples are easier to understand than explanations so I'll start there. If you'd like to understand my magic I explain it in more detail below these examples. You customize your folder structure in the `Directory` section of your `config.ini`. For details of the supported formats see [strftime.org](http://strftime.org/).
```
[Directory]
location=%city, %state
year=%Y
full_path=%year/%location
# -> 2015/Sunnyvale, California
location=%city, %state
month=%B
year=%Y
full_path=%year/%month/%location
# -> 2015/December/Sunnyvale, California
location=%city, %state
month=%m
year=%Y
full_path=%year-%month/%location
# -> 2015-12/Sunnyvale, California
date=%Y
location=%city, %state
custom=%date %album
full_path=%location/%custom
# -> Sunnyvale, California/2015 Birthday Party
```
#### Using fallback folders
There are times when the EXIF needed to correctly name a folder doesn't exist on a photo. I came up with fallback folders to help you deal with situations such as this. Here's how it works.
You can specify a series of folder names by separating them with a `|`. That's a pipe, not an L. Let's look at an example.
```
[Directory]
month=%m
year=%Y
location=%city
full_path=%month/%year/%album|%location|%"Beats me"
```
What this asks me to do is to name the last folder the same as the album I find in EXIF. If I don't find an album in EXIF then I should use the location. If there's no GPS in the EXIF then I should name the last folder `Beats me`.
#### How folder customization works
You can construct your folder structure using a combination of the location, dates and camera make/model. Under the `Directory` section of your `config.ini` file you can define placeholder names and assign each a value. For example, `date=%Y-%m` would create a date placeholder with a value of YYYY-MM which would be filled in with the date from the EXIF on the photo.
The placeholders can be used to define the folder structure you'd like to create. The default structure would look like `2015-07-Jul/Mountain View`.
I have some date placeholders you can customize. You can use any of [the standard Python time directives](https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior) to customize the date format to your liking.
* `%day` the day the photo was taken.
* `%month` the month the photo was taken.
* `%year` the year the photo was taken.
I have camera make and model placeholders which can be used to include the camera make and model into the folder path.
* `%camera_make` the make of the camera which took the photo.
* `%camera_model` the model of the camera which took the photo.
I also have a few built-in location placeholders you can use. Use this to construct the `%location` you use in `full_path`.
* `%city` the name of the city the photo was taken. Requires geolocation data in EXIF.
* `%state` the name of the state the photo was taken. Requires geolocation data in EXIF.
* `%country` the name of the country the photo was taken. Requires geolocation data in EXIF.
In addition to my built-in and date placeholders you can combine them into a single folder name using my complex placeholders.
* `%location` can be used to combine multiple values of `%city`, `%state` and `%country`. For example, `location=%city, %state` would result in folder names like `Sunnyvale, California`.
* `%date` can be used to combine multiple values from [the standard Python time directives](https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior). For example, `date=%Y-%m` would result in folder names like `2015-12`.
* `%custom` can be used to combine multiple values from anything else. Think of it as a catch-all when `%location` and `%date` don't meet your needs.
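To show the mechanics end to end, here is a minimal sketch of resolving such placeholder definitions into a folder path. It uses hypothetical photo metadata and is illustrative only, not Elodie's implementation:
```
from datetime import datetime

# Hypothetical metadata for one photo.
photo = {'city': 'Sunnyvale', 'state': 'California',
         'taken': datetime(2015, 12, 5, 12, 59, 30)}

# Placeholder definitions as they might appear under [Directory].
placeholders = {'%location': '%city, %state', '%date': '%Y-%m',
                'full_path': '%date/%location'}

def resolve(template):
    # Inline the complex placeholders first...
    template = template.replace('%date', placeholders['%date'])
    template = template.replace('%location', placeholders['%location'])
    # ...then fill the built-in location placeholders...
    template = template.replace('%city', photo['city']).replace('%state', photo['state'])
    # ...and let strftime handle the remaining date directives.
    return photo['taken'].strftime(template)

print(resolve(placeholders['full_path']))  # -> 2015-12/Sunnyvale, California
```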
#### How file customization works
You can configure how Elodie names your files using placeholders. This works similarly to how folder customization works. The default naming format is the one referred to elsewhere in this document and has many thought-through benefits. Using the default gives you files named like `2015-09-27_01-41-38-_dsc8705.jpg`.
* Minimizes the likelihood of naming conflicts.
* Encodes important EXIF information into the file name.
* Optimizes for sort order when listing in most file and photo viewers.
If you'd like to specify your own naming convention it's recommended you include something that's mostly unique, like the time including seconds. You'll need to include a `[File]` section in your `config.ini` file with a name attribute. If a placeholder doesn't have a value then it, plus any preceding non-alphabetic characters, is removed.
By default the resulting filename is all lowercase. To uppercase it instead, add `capitalization=upper`.
```
[File]
date=%Y-%m-%b-%H-%M-%S
name=%date-%original_name-%title.%extension
# -> 2012-05-may-12-59-30-dsc_1234-my-title.jpg
date=%Y-%m-%b-%H-%M-%S
name=%date-%original_name-%album.%extension
capitalization=upper
# -> 2012-05-MAY-12-59-30-DSC_1234-MY-ALBUM.JPG
```
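That removal rule, where an empty placeholder disappears along with its preceding non-alphabetic characters, can be sketched like this (illustrative only, not Elodie's actual code):
```
import re

def drop_empty(name, values):
    """Substitute placeholders; if one has no value, remove it together with
    any immediately preceding non-alphabetic characters (e.g. a dangling '-')."""
    for key, value in values.items():
        if value:
            name = name.replace('%' + key, value)
        else:
            name = re.sub('[^a-zA-Z]*%' + key, '', name)
    return name.lower()

values = {'date': '2015-09-27_01-41-38', 'original_name': '_dsc8705',
          'title': '', 'extension': 'jpg'}
print(drop_empty('%date-%original_name-%title.%extension', values))
# -> 2015-09-27_01-41-38-_dsc8705.jpg
```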
### Reorganize by changing location and dates
If you notice some photos were incorrectly organized you should definitely let me know. In the example above I put two photos into an *Unknown Location* folder because I didn't find GPS information in their EXIF. To fix this I'll help you add GPS information into the photos' EXIF and then I'll reorganize them.
#### Tell me where your photos were taken
Run the command below if you want to tell me the photos were taken in Las Vegas. You don't have to type all that in though. It's easier to just type `./elodie.py update --location="Las Vegas, NV" ` and select and drag the files from *OS X Finder* into the terminal.
```
./elodie.py update --location="Las Vegas, NV" /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.dng /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.nef
```
You should see this after running that command.
```
└── 2015-09-Sep
    └── Las Vegas
        ├── 2015-09-27_01-41-38-_dsc8705.dng
        └── 2015-09-27_01-41-38-_dsc8705.nef
```
#### Tell me when you took your photos
Run the command below if I got the date wrong when organizing your photos. Similarly to the above command you can drag files from *Finder* into your terminal.
```
./elodie.py update --time="2015-04-15" /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.dng /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.nef
```
That will change the date folder like so.
```
└── 2015-04-Apr
    └── Las Vegas
        ├── 2015-09-27_01-41-38-_dsc8705.dng
        └── 2015-09-27_01-41-38-_dsc8705.nef
```
You can, of course, ask me to change the location and time. I'll happily update the photos and move them around accordingly.
```
./elodie.py update --location="Las Vegas, NV" --time="2015-04-15" /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.dng /where/i/want/my/photos/to/go/2015-09-Sep/Unknown\ Location/2015-09-27_01-41-38-_dsc8705.nef
```
## What about photos I take in the future?
Organizing your existing photos is great. But I'd be lying if I said I was the only one who could help you with that. Unlike other programs I put the same effort into keeping your library organized into the future as I have in getting it organized in the first place.
### Letting me know when you've got more photos to organize
In order to sort new photos that I haven't already organized I need someone to tell me about them. There's no single way to do this. You could use inotify, cron, Automator or my favorite app - Hazel; it doesn't matter.
If you'd like to let me know of a specific photo or group of photos to add to your library you would run one of the following commands. Use fully qualified paths for everything since you won't be running this manually.
```
# I can import a single file into your library.
./elodie.py import --destination="/where/i/want/my/photo/to/go" /full/path/to/file.jpg
# I can also import all the photos from a directory into your library.
./elodie.py import --destination="/where/i/want/my/photo/to/go" /where/my/photos/are
```
## Why not use a database?
Look, it's not that I think databases are evil. One of my friends is a database. It's just that I've been doing this for a long time and I've always used a database for it. In the end they're more trouble than they're worth. I should have listened to my mother when she told me to not date a database.
It's a lot more work to organize photos without a database. No wonder everyone else uses them. But your happiness is my happiness. If a little elbow grease on my part makes you happy then I'm glad to do it.
### A bit on how I do all this without a database
Every photo is essentially a database. So it's more accurate to say I use the thousands of tiny databases you already have to organize your photos.
I'm simple. I put a photo into its proper location. I can update a photo to have the right date or location. The latter triggers the former, creating a nice tidy loop of organizational goodness.
I don't do anything else so don't bother asking.
## EXIF and XMP tags
When I organize photos I look at the embedded metadata. Here are the details of how I determine what information to use in order of precedence.
| Dimension | Fields | Notes |
|---|---|---|
| Date Taken (photo) | EXIF:DateTimeOriginal, EXIF:CreateDate, EXIF:ModifyDate, file created, file modified | |
| Date Taken (video, audio) | QuickTime:CreationDate, QuickTime:CreateDate, QuickTime:CreationDate-und-US, QuickTime:MediaCreateDate, H264:DateTimeOriginal, file created, file modified | |
| Location (photo) | EXIF:GPSLatitude/EXIF:GPSLatitudeRef, EXIF:GPSLongitude/EXIF:GPSLongitudeRef | |
| Location (video, audio) | XMP:GPSLatitude, Composite:GPSLatitude, XMP:GPSLongitude, Composite:GPSLongitude | Composite tags are read-only |
| Title (photo) | XMP:Title | |
| Title (video, audio) | XMP:DisplayName | |
| Album | XMP-xmpDM:Album, XMP:Album | XMP:Album is user defined in `configs/ExifTool_config` for backwards compatibility |
| Camera Make (photo, video) | EXIF:Make, QuickTime:Make | |
| Camera Model (photo, video) | EXIF:Model, QuickTime:Model | |
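As a sketch of how such a precedence list resolves, the first tag present wins (hypothetical tag values, not Elodie's actual code):
```
# Hypothetical flattened metadata as ExifTool might report it for one photo.
metadata = {'EXIF:ModifyDate': '2015:09:27 09:00:00',
            'EXIF:DateTimeOriginal': '2015:09:27 01:41:38'}

# Preference order for "Date Taken (photo)" from the table above.
DATE_TAKEN_PHOTO = ('EXIF:DateTimeOriginal', 'EXIF:CreateDate', 'EXIF:ModifyDate')

def first_present(tags, metadata):
    """Return the value of the first tag in the precedence list that is set."""
    for tag in tags:
        if metadata.get(tag):
            return metadata[tag]
    return None  # caller falls back to file created / modified times

print(first_present(DATE_TAKEN_PHOTO, metadata))  # -> 2015:09:27 01:41:38
```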
## Using OpenStreetMap data from MapQuest
I use MapQuest to help me organize your photos by location. You'll need to sign up for a [free developer account](https://developer.mapquest.com/plan_purchase/steps/business_edition/business_edition_free) and get an API key. They give you 15,000 calls per month so I can't do any more than that unless you shell out some big bucks to them. Once I hit my limit the best I'll be able to do is *Unknown Location* until the following month.
Once you sign up you'll have to get an API key and copy it into a file named `~/.elodie/config.ini`. I've included a `config.ini-sample` file which you can copy to `config.ini`.
```
mkdir ~/.elodie
cp config.ini-sample ~/.elodie/config.ini
# now you're ready to add your MapQuest key
```
If you're an English speaker you'll probably want to add `prefer_english_names=True` to the `[MapQuest]` section; otherwise cities will be named in the local language.
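For reference, the section would look something like this. The `key` attribute name is an assumption here; `config.ini-sample` shows the exact format:
```
[MapQuest]
key=your-api-key-goes-here
prefer_english_names=True
```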
## Questions, comments or concerns?
The best way to provide feedback is by opening a [GitHub issue](https://github.com/jmathai/elodie/issues) or emailing me at [jaisen@jmathai.com](mailto:jaisen@jmathai.com).

app/Readme.md (new file, 54 lines)

@@ -0,0 +1,54 @@
# Hello, I'm Elodie's GUI
~~ *Your Personal EXIF-based Photo, Video and Audio Assistant* ~~
<p align="center"><img src ="../../../blob/master/creative/logo@300x.png" /></p>
You can download [my latest GUI from the releases page](https://github.com/jmathai/elodie/releases).
My GUI taskbar app sits nestled away in your taskbar until you need me.
Let's say you took a few hundred photos in New York City. I'll have put the photos into a folder named *New York City*. You decide you'd rather organize those photos into a folder named *Summer in NYC*. What you'd do is select the photos using Finder and drag them onto my taskbar icon. I'll display a few options and one of them would be to *Create album*. Type in an album name and I'll add this to the EXIF of your photos and move them to a folder with the same name.
*NOTE: I've extensively used the GUI but it's a work in progress.*
## See me in action
Updating EXIF of photos using the GUI taskbar app.
[![IMAGE ALT TEXT](http://img.youtube.com/vi/fF_jGCaMog0/0.jpg)](http://www.youtube.com/watch?v=fF_jGCaMog0 "Updating Photos Using GUI Taskbar App")
## Building the app
You'll need to bundle up the python dependencies and create an electron app using Node.js.
### Bundling the python libraries
First you'll need to [install the python dependencies](../../../#install-everything-you-need).
Once you've done that you'll need to install `pyinstaller`.
```
pip install pyinstaller
```
Next you can `cd` to the root of the repository and run `pyinstaller`.
```
pyinstaller elodie.spec
```
This should create a `dist` folder that bundles all of the dependencies. Now you're ready to build the GUI app.
### Building the GUI app
The GUI app is written using [Node.js](https://github.com/nodejs) and [Electron](https://github.com/atom/electron) and you'll need [electron-packager](https://github.com/maxogden/electron-packager) to create an executable file for your operating system.
I'm going to assume you've got *Node.js* installed. I've successfully built the app using version `5.1.0` on OS X.
```
# use --platform=win32 for Windows or --platform=linux for linux
electron-packager . Elodie --platform=darwin --arch=x64 --version=0.34.2 --overwrite
```
This will create a folder named `Elodie-darwin-x64` which contains the executable. Running the executable should add my face to your taskbar which you can click on or drag photos over.

app/html/blank.html (new file, 6 lines)

@@ -0,0 +1,6 @@
<html>
<head></head>
<body>
<div style="width:100%; height:100%; background-color:#fff;"></div>
</body>
</html>

app/html/config.html (new file, 34 lines)

@@ -0,0 +1,34 @@
<html>
<head>
<script src="js/handlers.js"></script>
<link href='https://fonts.googleapis.com/css?family=Lato:400,100,300,100italic,300italic' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="css/bootstrap.css"></script>
<link rel="stylesheet" href="css/boilerplate.css"></script>
<link rel="stylesheet" href="css/styles.css"></script>
<link rel="stylesheet" href="css/fontello/css/animation.css"></script>
<link rel="stylesheet" href="css/fontello/css/elodie.css"></script>
</head>
<body>
<div class="titlebar">
<!--<a href="" class="left quit quitProgram"><i class="icon-cancel-circle"></i></a>
<a href="" class="left minus minimizeProgram"><i class="icon-minus-circle"></i></a>-->
How can I help you? <em>-- Elodie</em><i></i>
</div>
<form class="updateConfig" action="" method="post">
<div class="content">
<p>
Doesn't look like you have a MapQuest API key. Get one for free <a href="#" class="launchUrl" data-url="https://developer.mapquest.com/plan_purchase/steps/business_edition/business_edition_free">here</a>.
</p>
<div class="location">
<label for="mapquest-api-key-field"><i class="icon-map"></i>MapQuest API Key</label>
<input id="mapquest-api-key-field" type="text" placeholder="i.e. pzjNKTtTjLydLtxUBwdgKAIC8OQbGLUy">
<button type="submit" class="push">Get Started<i></i></button>
</div>
</div>
</form>
<script>
document.getElementById('mapquest-api-key-field').focus();
</script>
</body>
</html>

app/html/css/boilerplate.css (new file, 293 lines)

@@ -0,0 +1,293 @@
/*
* HTML5 Boilerplate
*
* What follows is the result of much research on cross-browser styling.
* Credit left inline and big thanks to Nicolas Gallagher, Jonathan Neal,
* Kroc Camen, and the H5BP dev community and team.
*
* Detailed information about this CSS: h5bp.com/css
*
* ==|== normalize ==========================================================
*/
/* =============================================================================
HTML5 display definitions
========================================================================== */
article, aside, details, figcaption, figure, footer, header, hgroup, nav, section { display: block; }
audio, canvas, video { display: inline-block; *display: inline; *zoom: 1; }
audio:not([controls]) { display: none; }
[hidden] { display: none; }
/* =============================================================================
Base
========================================================================== */
/*
* 1. Correct text resizing oddly in IE6/7 when body font-size is set using em units
* 2. Force vertical scrollbar in non-IE
* 3. Prevent iOS text size adjust on device orientation change, without disabling user zoom: h5bp.com/g
*/
html { font-size: 100%; overflow-y: scroll; -webkit-text-size-adjust: 100%; -ms-text-size-adjust: 100%; }
body { margin: 0; font-size: 13px; line-height: 1.231; }
body, button, input, select, textarea { font-family: sans-serif; color: #222; }
/*
* Remove text-shadow in selection highlight: h5bp.com/i
* These selection declarations have to be separate
* Also: hot pink! (or customize the background color to match your design)
*/
::-moz-selection { background: #fe57a1; color: #fff; text-shadow: none; }
::selection { background: #fe57a1; color: #fff; text-shadow: none; }
/* =============================================================================
Links
========================================================================== */
a { color: #00e; }
a:visited { color: #551a8b; }
a:hover { color: #06e; }
a:focus { outline: thin dotted; }
/* Improve readability when focused and hovered in all browsers: h5bp.com/h */
a:hover, a:active { outline: 0; }
/* =============================================================================
Typography
========================================================================== */
abbr[title] { border-bottom: 1px dotted; }
b, strong { font-weight: bold; }
blockquote { margin: 1em 40px; }
dfn { font-style: italic; }
hr { display: block; height: 1px; border: 0; border-top: 1px solid #ccc; margin: 1em 0; padding: 0; }
ins { background: #ff9; color: #000; text-decoration: none; }
mark { background: #ff0; color: #000; font-style: italic; font-weight: bold; }
/* Redeclare monospace font family: h5bp.com/j */
pre, code, kbd, samp { font-family: monospace, serif; _font-family: 'courier new', monospace; font-size: 1em; }
/* Improve readability of pre-formatted text in all browsers */
pre { white-space: pre; white-space: pre-wrap; word-wrap: break-word; }
q { quotes: none; }
q:before, q:after { content: ""; content: none; }
small { font-size: 85%; }
/* Position subscript and superscript content without affecting line-height: h5bp.com/k */
sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; }
sup { top: -0.5em; }
sub { bottom: -0.25em; }
/* =============================================================================
Lists
========================================================================== */
ul, ol { margin: 1em 0; padding: 0 0 0 40px; }
dd { margin: 0 0 0 40px; }
nav ul, nav ol { list-style: none; list-style-image: none; margin: 0; padding: 0; }
/* =============================================================================
Embedded content
========================================================================== */
/*
* 1. Improve image quality when scaled in IE7: h5bp.com/d
* 2. Remove the gap between images and borders on image containers: h5bp.com/e
*/
img { border: 0; -ms-interpolation-mode: bicubic; vertical-align: middle; }
/*
* Correct overflow not hidden in IE9
*/
svg:not(:root) { overflow: hidden; }
/* =============================================================================
Figures
========================================================================== */
figure { margin: 0; }
/* =============================================================================
Forms
========================================================================== */
form { margin: 0; }
fieldset { border: 0; margin: 0; padding: 0; }
/* Indicate that 'label' will shift focus to the associated form element */
label { cursor: pointer; }
/*
* 1. Correct color not inheriting in IE6/7/8/9
* 2. Correct alignment displayed oddly in IE6/7
*/
legend { border: 0; *margin-left: -7px; padding: 0; }
/*
* 1. Correct font-size not inheriting in all browsers
* 2. Remove margins in FF3/4 S5 Chrome
* 3. Define consistent vertical alignment display in all browsers
*/
button, input, select, textarea { font-size: 100%; margin: 0; vertical-align: baseline; *vertical-align: middle; }
/*
* 1. Define line-height as normal to match FF3/4 (set using !important in the UA stylesheet)
* 2. Correct inner spacing displayed oddly in IE6/7
*/
button, input { line-height: normal; *overflow: visible; }
/*
* Reintroduce inner spacing in 'table' to avoid overlap and whitespace issues in IE6/7
*/
table button, table input { *overflow: auto; }
/*
* 1. Display hand cursor for clickable form elements
* 2. Allow styling of clickable form elements in iOS
*/
button, input[type="button"], input[type="reset"], input[type="submit"] { cursor: pointer; -webkit-appearance: button; }
/*
* Consistent box sizing and appearance
*/
input[type="checkbox"], input[type="radio"] { box-sizing: border-box; padding: 0; }
input[type="search"] { -webkit-appearance: textfield; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; box-sizing: content-box; }
input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; }
/*
* Remove inner padding and border in FF3/4: h5bp.com/l
*/
button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; }
/*
* 1. Remove default vertical scrollbar in IE6/7/8/9
* 2. Allow only vertical resizing
*/
textarea { overflow: auto; vertical-align: top; resize: vertical; }
/* Colors for form validity */
input:valid, textarea:valid { }
input:invalid, textarea:invalid { background-color: #f0dddd; }
/* =============================================================================
Tables
========================================================================== */
table { border-collapse: collapse; border-spacing: 0; }
td { vertical-align: top; }
/* ==|== primary styles =====================================================
Author:
========================================================================== */
/* ==|== media queries ======================================================
PLACEHOLDER Media Queries for Responsive Design.
These override the primary ('mobile first') styles
Modify as content requires.
========================================================================== */
@media only screen and (min-width: 480px) {
/* Style adjustments for viewports 480px and over go here */
}
@media only screen and (min-width: 768px) {
/* Style adjustments for viewports 768px and over go here */
}
/* ==|== non-semantic helper classes ========================================
Please define your styles before this section.
========================================================================== */
/* For image replacement */
.ir { display: block; border: 0; text-indent: -999em; overflow: hidden; background-color: transparent; background-repeat: no-repeat; text-align: left; direction: ltr; }
.ir br { display: none; }
/* Hide from both screenreaders and browsers: h5bp.com/u */
.hidden { display: none !important; visibility: hidden; }
/* Hide only visually, but have it available for screenreaders: h5bp.com/v */
.visuallyhidden { border: 0; clip: rect(0 0 0 0); height: 1px; margin: -1px; overflow: hidden; padding: 0; position: absolute; width: 1px; }
/* Extends the .visuallyhidden class to allow the element to be focusable when navigated to via the keyboard: h5bp.com/p */
.visuallyhidden.focusable:active, .visuallyhidden.focusable:focus { clip: auto; height: auto; margin: 0; overflow: visible; position: static; width: auto; }
/* Hide visually and from screenreaders, but maintain layout */
.invisible { visibility: hidden; }
/* Contain floats: h5bp.com/q */
.clearfix:before, .clearfix:after { content: ""; display: table; }
.clearfix:after { clear: both; }
.clearfix { *zoom: 1; }
/* ==|== print styles =======================================================
Print styles.
Inlined to avoid required HTTP connection: h5bp.com/r
========================================================================== */
@media print {
* { background: transparent !important; color: black !important; text-shadow: none !important; filter:none !important; -ms-filter: none !important; } /* Black prints faster: h5bp.com/s */
a, a:visited { text-decoration: underline; }
a[href]:after { content: " (" attr(href) ")"; }
abbr[title]:after { content: " (" attr(title) ")"; }
.ir a:after, a[href^="javascript:"]:after, a[href^="#"]:after { content: ""; } /* Don't show links for images, or javascript/internal links */
pre, blockquote { border: 1px solid #999; page-break-inside: avoid; }
thead { display: table-header-group; } /* h5bp.com/t */
tr, img { page-break-inside: avoid; }
img { max-width: 100% !important; }
@page { margin: 0.5cm; }
p, h2, h3 { orphans: 3; widows: 3; }
h2, h3 { page-break-after: avoid; }
}

app/html/css/bootstrap.css (vendored new file, 6800 lines)

File diff suppressed because it is too large.

app/html/css/fontello/LICENSE.txt (new file, 30 lines)

@@ -0,0 +1,30 @@
Font license info
## Fontelico
Copyright (C) 2012 by Fontello project
Author: Crowdsourced, for Fontello project
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://fontello.com
## Modern Pictograms
Copyright (c) 2012 by John Caserta. All rights reserved.
Author: John Caserta
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://thedesignoffice.org/project/modern-pictograms/
## Typicons
(c) Stephen Hutchings 2012
Author: Stephen Hutchings
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://typicons.com/

app/html/css/fontello/config.json (new file, 88 lines)

@@ -0,0 +1,88 @@
{
"name": "elodie",
"css_prefix_text": "icon-",
"css_use_suffix": false,
"hinting": true,
"units_per_em": 1000,
"ascent": 850,
"glyphs": [
{
"uid": "c64623255a4a7c72436b199b05296c4f",
"css": "happy",
"code": 59392,
"src": "fontelico"
},
{
"uid": "53ed8570225581269cd7eff5795e8bea",
"css": "unhappy",
"code": 59396,
"src": "fontelico"
},
{
"uid": "f0c301ac841dafc38d8eb1b933fc73e5",
"css": "spin",
"code": 59393,
"src": "fontelico"
},
{
"uid": "0f99ab40ab0b4d64a74f2d0deeb03e42",
"css": "video",
"code": 59397,
"src": "fontawesome"
},
{
"uid": "b091a8bd0fdade174951f17d936f51e4",
"css": "folder-closed",
"code": 59402,
"src": "fontawesome"
},
{
"uid": "6533bdc16ab201eb3f3b27ce989cab33",
"css": "folder-open",
"code": 59401,
"src": "fontawesome"
},
{
"uid": "c5845105a87df2ee1999826d90622f6a",
"css": "title",
"code": 59399,
"src": "fontawesome"
},
{
"uid": "87d337fee4866c2c28f6082994ce0f41",
"css": "map",
"code": 59395,
"src": "typicons"
},
{
"uid": "bd517dbd6ccbc464f6d80efca97abb7d",
"css": "media-add",
"code": 59400,
"src": "typicons"
},
{
"uid": "dplw5xo88mzzr7b45nvjcamyyhni6drs",
"css": "book",
"code": 59394,
"src": "modernpics"
},
{
"uid": "64abb7c56aefca89046bb69f7251d2e2",
"css": "calendar",
"code": 59398,
"src": "elusive"
},
{
"uid": "15739f3032c2aa3df67efc96b3ffef56",
"css": "cancel-circle",
"code": 59403,
"src": "websymbols"
},
{
"uid": "994eaa764b3f30721f3839c64c390ce3",
"css": "minus-circle",
"code": 59404,
"src": "websymbols"
}
]
}

app/html/css/fontello/css/animation.css (vendored new file, 85 lines)

@@ -0,0 +1,85 @@
/*
Animation example, for spinners
*/
.animate-spin {
-moz-animation: spin 2s infinite linear;
-o-animation: spin 2s infinite linear;
-webkit-animation: spin 2s infinite linear;
animation: spin 2s infinite linear;
display: inline-block;
}
@-moz-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@-webkit-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@-o-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@-ms-keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@keyframes spin {
0% {
-moz-transform: rotate(0deg);
-o-transform: rotate(0deg);
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-moz-transform: rotate(359deg);
-o-transform: rotate(359deg);
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}

(unnamed file: a second, reduced fontello config.json, 40 lines)

@@ -0,0 +1,40 @@
{
"name": "elodie",
"css_prefix_text": "icon-",
"css_use_suffix": false,
"hinting": true,
"units_per_em": 1000,
"ascent": 850,
"glyphs": [
{
"uid": "c64623255a4a7c72436b199b05296c4f",
"css": "happy",
"code": 59392,
"src": "fontelico"
},
{
"uid": "53ed8570225581269cd7eff5795e8bea",
"css": "emo-unhappy",
"code": 59396,
"src": "fontelico"
},
{
"uid": "f0c301ac841dafc38d8eb1b933fc73e5",
"css": "spin",
"code": 59393,
"src": "fontelico"
},
{
"uid": "87d337fee4866c2c28f6082994ce0f41",
"css": "map",
"code": 59395,
"src": "typicons"
},
{
"uid": "dplw5xo88mzzr7b45nvjcamyyhni6drs",
"css": "book",
"code": 59394,
"src": "modernpics"
}
]
}

app/html/css/fontello/css/elodie.css (vendored new file, 69 lines)

@@ -0,0 +1,69 @@
@font-face {
font-family: 'elodie';
src: url('../font/elodie.eot?99803888');
src: url('../font/elodie.eot?99803888#iefix') format('embedded-opentype'),
url('../font/elodie.woff?99803888') format('woff'),
url('../font/elodie.ttf?99803888') format('truetype'),
url('../font/elodie.svg?99803888#elodie') format('svg');
font-weight: normal;
font-style: normal;
}
/* Chrome hack: SVG is rendered more smooth in Windozze. 100% magic, uncomment if you need it. */
/* Note, that will break hinting! In other OS-es font will be not as sharp as it could be */
/*
@media screen and (-webkit-min-device-pixel-ratio:0) {
@font-face {
font-family: 'elodie';
src: url('../font/elodie.svg?99803888#elodie') format('svg');
}
}
*/
[class^="icon-"]:before, [class*=" icon-"]:before {
font-family: "elodie";
font-style: normal;
font-weight: normal;
speak: none;
display: inline-block;
text-decoration: inherit;
width: 1em;
margin-right: .2em;
text-align: center;
/* opacity: .8; */
/* For safety - reset parent styles, that can break glyph codes*/
font-variant: normal;
text-transform: none;
/* fix buttons height, for twitter bootstrap */
line-height: 1em;
/* Animation center compensation - margins should be symmetric */
/* remove if not needed */
margin-left: .2em;
/* you can be more comfortable with increased icons size */
/* font-size: 120%; */
/* Font smoothing. That was taken from TWBS */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
/* Uncomment for 3D effect */
/* text-shadow: 1px 1px 1px rgba(127, 127, 127, 0.3); */
}
.icon-happy:before { content: '\e800'; } /* '' */
.icon-spin:before { content: '\e801'; } /* '' */
.icon-book:before { content: '\e802'; } /* '' */
.icon-map:before { content: '\e803'; } /* '' */
.icon-unhappy:before { content: '\e804'; } /* '' */
.icon-video:before { content: '\e805'; } /* '' */
.icon-calendar:before { content: '\e806'; } /* '' */
.icon-title:before { content: '\e807'; } /* '' */
.icon-media-add:before { content: '\e808'; } /* '' */
.icon-folder-open:before { content: '\e809'; } /* '' */
.icon-folder-closed:before { content: '\e80a'; } /* '' */
.icon-cancel-circle:before { content: '\e80b'; } /* '' */
.icon-minus-circle:before { content: '\e80c'; } /* '' */

Binary file not shown.

app/html/css/fontello/font/elodie.svg (new file, 24 lines)

@@ -0,0 +1,24 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg">
<metadata>Copyright (C) 2015 by original authors @ fontello.com</metadata>
<defs>
<font id="elodie" horiz-adv-x="1000" >
<font-face font-family="elodie" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" />
<missing-glyph horiz-adv-x="1000" />
<glyph glyph-name="happy" unicode="&#xe800;" d="m261 800c-60 0-109-65-109-144 0-80 49-145 109-145s110 65 110 145c0 79-49 144-110 144z m477 0c-61 0-110-65-110-144 0-80 49-145 110-145 60 0 110 65 110 145 0 79-50 144-110 144z m208-599c-13 0-27-5-37-16-4-4-8-8-12-12-111-109-253-164-396-165-142-2-285 50-396 155l-3 3-12 12c-21 21-54 20-75-1-20-21-20-55 1-76 3-4 8-8 14-14l3-3c132-124 301-186 469-184 169 1 337 67 468 195 5 5 9 10 14 14 20 22 20 56-1 77-10 10-23 15-37 15z" horiz-adv-x="999" />
<glyph glyph-name="spin" unicode="&#xe801;" d="m46 144l0 0c0 0-1 0-1 0-8 18-15 37-21 55-6 19-11 38-15 58-19 99-8 203 35 298 3 6 10 8 15 5 1 0 2 0 2-1l0 0 80-59c5-3 6-9 4-14-5-12-9-25-12-38-4-12-7-26-9-39-11-67-3-137 23-201 2-5 0-10-4-13l0 0-80-56c-5-4-12-3-16 3-1 0-1 1-1 2l0 0z m120 574l0 0c0 1 0 1 0 1 15 13 30 25 46 37 16 11 33 22 51 31 89 50 192 72 297 60 6-1 10-6 10-13 0-1-1-1-1-2l0 0-31-94c-2-5-8-8-13-7-13 0-27 0-40 0-14-1-27-2-40-4-68-11-133-40-186-84-4-3-10-3-14 0l0 0-79 58c-5 3-6 11-2 16 0 0 1 1 2 1l0 0z m588 65l0 0c0 0 1 0 1 0 17-10 34-21 50-32 16-12 31-25 46-38 74-69 127-160 148-262 2-6-2-12-9-13-1 0-1 0-2 0l0 0-100 1c-5 0-10 4-11 9-3 13-8 26-12 38-5 12-10 25-17 36-31 61-78 113-137 150-5 3-6 8-5 13l0 0 31 92c2 6 9 9 15 7 1 0 2-1 2-1l0 0z m244-535l0 0c0 0 0 0 0 0-4-20-9-39-15-57-7-19-14-37-22-55-44-92-114-170-205-221-6-3-13-1-16 4 0 1-1 2-1 2l0 0-30 94c-2 6 1 12 6 14 11 7 22 15 32 23 11 9 21 18 30 27 49 48 84 109 101 176 2 5 6 8 11 8l0 0 98-1c6 0 11-5 11-11 0-1 0-2 0-3l0 0z m-438-395l0 0c0 0 0 0 0 0-20-2-40-3-60-3-20 0-40 1-59 4-102 12-198 54-276 125-5 4-5 11 0 16 0 0 1 1 1 1l0 0 81 58c5 3 12 2 16-2 10-8 20-16 32-23 11-7 22-14 34-20 62-31 131-45 200-41 6 0 10-3 12-8l0 0 29-92c2-6-1-12-7-14-1-1-2-1-3-1l0 0z" horiz-adv-x="1000" />
<glyph glyph-name="book" unicode="&#xe802;" d="m600 630l35 0 0-672-502 0c-74 0-133 52-133 128l0 581c0 41 34 75 75 75l465 0 0-576-407 0c-52 0-88-28-88-78l0-2c0-50 36-83 88-83l467 0 0 627z m-60-562l-402 0c-12 0-22 7-22 19 0 10 10 16 22 16l402 0 0-35z" horiz-adv-x="635" />
<glyph glyph-name="map" unicode="&#xe803;" d="m53-93q-23 0-38 16t-15 36l0 521q0 21 15 36l235 235q14 14 34 15t35-11l224-179 202 201q25 25 57 10t32-47l0-520q0-21-16-38l-234-233q-14-14-35-15t-35 11l-224 179-201-202q-15-15-36-15z m51 178q152 150 156 152l0 378-156-156 0-374z m215 149l202-162 0 389-208 165 0-389q1-1 3-2t3-1z m410 7l0 374q-153-151-156-154l0-376z" horiz-adv-x="834" />
<glyph glyph-name="unhappy" unicode="&#xe804;" d="m261 800c-60 0-109-65-109-144 0-80 49-145 109-145s110 65 110 145c0 79-49 144-110 144z m477 0c-61 0-110-65-110-144 0-80 49-145 110-145 60 0 110 65 110 145 0 79-50 144-110 144z m-244-599c-165 0-331-62-461-184l-3-3c-6-5-11-10-14-14-21-21-21-55-1-76 21-21 54-21 75-1l12 12 3 3c111 105 254 157 396 155 143-1 285-56 396-165 4-4 8-8 12-12 20-21 54-21 74-1 21 21 21 55 1 77-5 5-9 10-14 14-131 129-299 194-468 195-3 0-6 0-8 0z" horiz-adv-x="999" />
<glyph glyph-name="video" unicode="&#xe805;" d="m1000 654v-608q0-23-22-32-7-3-14-3-15 0-25 10l-225 225v-92q0-67-47-114t-113-47h-393q-67 0-114 47t-47 114v392q0 67 47 114t114 47h393q66 0 113-47t47-114v-92l225 225q10 10 25 10 7 0 14-3 22-9 22-32z" horiz-adv-x="1000" />
<glyph glyph-name="calendar" unicode="&#xe806;" d="m0-150l0 649 893 0 0-649-893 0z m0 705l0 221 109 0 0-141 200 0 0 141 275 0 0-141 199 0 0 141 110 0 0-221-893 0z m168 139l0 156 82 0 0-156-82 0z m59-619q0-112 123-112 47 0 84 32 39 31 39 80 0 68-78 90 48 15 64 48 12 28-2 73-27 62-107 62-51 0-86-26t-37-77l72 0q0 45 49 46 43 0 45-52 0-49-84-47l0-57q48 0 68-8 23-11 23-46 0-57-54-61-43 0-47 55l-72 0z m281 146q49 14 88 47l0-297 70 0 0 371-64 0q-38-37-94-58l0-63z m135 473l0 156 82 0 0-156-82 0z" horiz-adv-x="893" />
<glyph glyph-name="title" unicode="&#xe807;" d="m713 745v-41q0-16-10-34t-24-18q-28 0-30-1-15-3-18-17-2-6-2-36v-643q0-14-10-24t-24-10h-60q-14 0-24 10t-10 24v680h-80v-680q0-14-9-24t-25-10h-60q-14 0-24 10t-10 24v277q-82 7-137 33-70 33-107 100-36 65-36 145 0 92 50 159 49 66 116 89 62 21 233 21h267q14 0 24-10t10-24z" horiz-adv-x="714.3" />
<glyph glyph-name="media-add" unicode="&#xe808;" d="m573 350q21 0 36-15t16-37-16-36-36-15l-104 0 0-105q0-22-16-37t-37-15-36 15-15 37l0 105-105 0q-21 0-36 15t-15 36 15 37 36 15l105 0 0 104q0 21 15 37t36 16 37-16 16-37l0-104 104 0z m245 245q16-16 16-36l0-521q0-65-46-111t-110-46l-522 0q-65 0-110 46t-46 111l0 625q0 65 46 110t110 46l417 0q22 0 37-15z m-110-36l-135 134 0-56q0-32 23-55t55-23l57 0z m-30-574q21 0 36 16t15 37l0 469-78 0q-53 0-92 38t-38 92l0 78-365 0q-21 0-37-15t-15-37l0-625q0-21 15-37t37-16l522 0z" horiz-adv-x="834" />
<glyph glyph-name="folder-open" unicode="&#xe809;" d="m994 330q0 20-30 20h-607q-22 0-48-12t-39-29l-164-203q-11-13-11-22 0-20 30-20h607q22 0 48 13t40 29l164 203q10 12 10 21z m-637 91h429v90q0 22-16 38t-38 15h-321q-23 0-38 16t-16 38v36q0 22-15 38t-38 15h-179q-22 0-38-15t-16-38v-476l143 175q25 30 65 49t78 19z m708-91q0-34-25-66l-165-203q-24-30-65-49t-78-19h-607q-51 0-88 37t-37 88v536q0 51 37 88t88 37h179q51 0 88-37t37-88v-18h303q51 0 88-37t37-88v-90h107q30 0 56-13t37-40q8-17 8-38z" horiz-adv-x="1071.4" />
<glyph glyph-name="folder-closed" unicode="&#xe80a;" d="m857 118v393q0 22-15 38t-38 15h-393q-23 0-38 16t-16 38v36q0 22-15 38t-38 15h-179q-22 0-38-15t-16-38v-536q0-22 16-38t38-16h679q22 0 38 16t15 38z m72 393v-393q0-51-37-88t-88-37h-679q-51 0-88 37t-37 88v536q0 51 37 88t88 37h179q51 0 88-37t37-88v-18h375q51 0 88-37t37-88z" horiz-adv-x="928.6" />
<glyph glyph-name="cancel-circle" unicode="&#xe80b;" d="m1000 349q0-136-67-251t-182-182-251-67-251 67-182 182-67 251 67 251 182 182 251 67 251-67 182-182 67-251z m-339-232l71 71-161 161 161 161-71 71-161-161-161 161-71-71 161-161-161-161 71-71 161 161z" horiz-adv-x="1000" />
<glyph glyph-name="minus-circle" unicode="&#xe80c;" d="m1000 349q0-136-67-251t-182-182-251-67-251 67-182 182-67 251 67 251 182 182 251 67 251-67 182-182 67-251z m-794-58h589v118h-589v-118z" horiz-adv-x="1000" />
</font>
</defs>
</svg>


Binary file not shown.

Binary file not shown.

app/html/css/styles.css (new file, 188 lines)

@@ -0,0 +1,188 @@
body {
-webkit-animation: fadein 1s;
}
/* Safari, Chrome and Opera > 12.1 */
@-webkit-keyframes fadein {
from { opacity: 0; }
to { opacity: 1; }
}
*, body, div {
font-family: 'Lato', 'Helvetica';
font-weight: 300;
font-size: 1.1em;
color: #444;
}
body {
padding: 0;
margin: 0;
}
::-webkit-input-placeholder {
color: #ddd;
}
.titlebar {
height: 45px;
padding-top:10px;
text-align: center;
background-color: #eee;
border-bottom: solid 1.5px #aaa;
}
.titlebar a {
font-weight: 300;
font-size:.7em;
padding-top:5px;
}
.titlebar a.right {
float:right;
}
.titlebar a.left {
float:left;
}
.titlebar a.quit i {
color: #ff4136 !important;
}
.titlebar a.minus i {
color: #ffba00 !important;
}
.x-titlebar em {
padding-right: 28px;
background: url("../img/logo@18x22.png") no-repeat right 4px;
}
.content, .content-index {
padding: 0 10px 10px 10px;
font-size: 1em;
}
.content > div {
border-bottom: solid 1px #eee;
padding: 10px 0;
}
.content > p {
padding: 10px 0;
}
.content .import-success, .content .import-success a {
font-size:.9em;
}
.status {
display: none;
}
.status ul {
list-style-type:none;
padding:0;
margin:20px 0 0;
font-size:.8em;
}
.status ul li {
font-size:.9em;
padding:0;
margin:0 0 10px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.status ul li .destination {
width: 100%;
font-size: .75em;
padding-top: 0;
}
.preview {
position: relative;
padding: 20px 0;
}
.preview .center-cropped {
display: inline-block;
border-radius: 2px;
border: solid 1px #ddd;
margin: 1px;
width: 42px;
height: 42px;
background-position: center center;
background-size: cover;
}
.preview .center-cropped.video:before {
font-family: "elodie";
color: #eee;
content: '\e805';
position: absolute;
margin-top: 6px;
margin-left: 8px;
font-size: 1.5em;
}
i {
color: #555;
}
i.icon-happy {
color: #6cc644;
}
i.icon-unhappy {
color: #bd2c00;
}
label {
font-size: .9em;
font-weight: 300;
display: block;
padding-bottom:3px;
}
input, button {
font-family: 'Lato', 'Helvetica';
font-weight: 300;
font-size: .9em;
color: #666;
border: solid 1px #eee;
border-radius: 3px;
}
input:focus,
select:focus,
textarea:focus,
button:focus {
outline: none;
}
input {
padding: 4px;
width: 100%;
}
input[type="file"] {
height:0px;
width:0px;
overflow:hidden;
display:none;
}
button {
cursor: pointer;
background-color: #eee;
padding: 4px 10px;
margin-top: 10px;
}
small {
font-size:.7em;
}

BIN
app/html/img/logo.png Normal file

Binary file not shown.

Size: 550 KiB

Binary file not shown.

Size: 1.2 KiB

37
app/html/index.html Normal file
View File

@ -0,0 +1,37 @@
<html>
<head>
<script src="js/handlers.js"></script>
<link href='https://fonts.googleapis.com/css?family=Lato:400,100,300,100italic,300italic' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="css/bootstrap.css"></script>
<link rel="stylesheet" href="css/boilerplate.css"></script>
<link rel="stylesheet" href="css/styles.css"></script>
<link rel="stylesheet" href="css/fontello/css/animation.css"></script>
<link rel="stylesheet" href="css/fontello/css/elodie.css"></script>
</head>
<body>
<div class="titlebar">
<!--<a href="" class="left quit quitProgram"><i class="icon-cancel-circle"></i></a>
<a href="" class="left minus minimizeProgram"><i class="icon-minus-circle"></i></a>-->
How can I help you? <em>-- Elodie</em><i></i>
</div>
<form class="importPhotos" action="" method="post">
<div id="content" class="content">
<p>
Let me know where your photos are and where you'd like me to put them as I sort them.
<small><em>(You can drop your photos here to update their information.)</em></small>
</p>
<div>
<label for="source"><i class="icon-folder-open"></i> What folder are your photos in now?</label>
<input type="text" name="source" id="source" placeholder="Path to your photos folder">
<small><em>Hint: Right-clicking your folder in Finder, pressing Option, and clicking "Copy as Pathname" will put the full path on your clipboard.</em></small>
</div>
<div>
<label for="destination"><i class="icon-folder-closed"></i> Where would you like me to organize them to?</label>
<input type="text" name="destination" id="destination" placeholder="Path to your photos folder">
<button type="submit" class="push">Start Organizing<i></i></button>
</div>
<div class="import-success"></div>
</div>
</form>
</body>
</html>

250
app/html/js/handlers.js Normal file
View File

@ -0,0 +1,250 @@
var __constants__ = {
baseUrl : 'http://localhost:5000'
};
var __process__ = {};
if(typeof(require) === 'function') {
var ipc = require('ipc');
var path = require('path');
var os = require('os');
ipc.on('files', function(files) {
__process__.files = files;
});
ipc.on('preview', function(files) {
handlers.renderPreview(files);
});
ipc.on('update-import-success', function(args) {
//var response = JSON.parse(args['stdout']);
handlers.setSuccessTitle();
handlers.removeProgressIcons();
handlers.addSuccessImportMessage(args);
});
ipc.on('update-import-no-photos', function(args) {
//var response = JSON.parse(args['stdout']);
handlers.removeProgressIcons();
});
ipc.on('update-config-status', function(args) {
if(args) {
// @TODO: We should really handle this in the nodejs code.
handlers.removeProgressIcons();
location.href = 'index.html';
}
});
ipc.on('update-photos-success', function(args) {
if(os.platform() == 'win32'){
var response = JSON.parse(args['stdout'].replace(/\\/g, '\\\\'));
}else{
var response = JSON.parse(args['stdout']);
}
handlers.setSuccessTitle();
handlers.removeProgressIcons();
handlers.updateStatus(response);
});
function Broadcast() {
this.send = function(name, message) {
ipc.send(name, message);
};
}
window.onload = function () {
var broadcast = new Broadcast();
window.ondragover = function (e){ e.preventDefault(); return false };
window.ondrop = function (e){ e.preventDefault(); return false };
var holder = document.getElementById('content');
if(holder != null){
holder.ondrop = function (e) {
e.preventDefault();
var files = [];
for (var i = 0; i < e.dataTransfer.files.length; ++i) {
console.log(e.dataTransfer.files[i].path);
files.push(e.dataTransfer.files[i].path);
}
broadcast.send('load-update-photos', files);
return false;
};
}
};
}
function Handlers() {
var self = this;
var broadcast = new Broadcast();
this.click = {};
this.submit = {};
this.change = {};
// CHANGE
this.change.fileSelected = function(ev) {
var el = ev.target,
dir = el.value.substr(el.value.lastIndexOf("\\")+1),
tgt = document.querySelector(el.dataset.display);
tgt.innerHTML = dir;
};
// CLICK
this.click.selectFile = function(ev) {
var el = ev.target,
tgt = document.querySelector(el.dataset.for);
ev.preventDefault();
tgt.click();
};
this.click.launchFinder = function(ev) {
var el = ev.target,
tgt = el.dataset.path;
ev.preventDefault();
broadcast.send('launch-finder', tgt);
};
this.click.launchUrl = function(ev) {
var el = ev.target,
tgt = el.dataset.url;
ev.preventDefault();
broadcast.send('launch-url', tgt);
};
this.click.quitProgram = function(ev) {
//ev.preventDefault();
console.log('quit');
broadcast.send('program-quit');
};
// SUBMIT
this.submit.importPhotos = function(ev) {
var el = ev.target,
cls = el.className,
params;
ev.preventDefault();
params = {};
params['source'] = document.querySelector('input[name="source"]').value
params['destination'] = document.querySelector('input[name="destination"]').value
if(params['destination'].length === 0 || params['source'].length === 0) {
return;
}
document.querySelector('button.push i').className = 'icon-spin animate-spin';
broadcast.send('import-photos', params);
};
this.submit.updateConfig = function(ev) {
var el = ev.target,
cls = el.className,
params;
ev.preventDefault();
document.querySelector('button.push i').className = 'icon-spin animate-spin';
params = {};
params['mapQuestKey'] = document.querySelector('input[id="mapquest-api-key-field"]').value;
if(params['mapQuestKey'].length === 0) {
return;
}
broadcast.send('update-config', params);
};
this.submit.updatePhotos = function(ev) {
var el = ev.target,
cls = el.className,
params;
ev.preventDefault();
document.querySelector('button.push i').className = 'icon-spin animate-spin';
params = {};
params['location'] = document.querySelector('input[id="location-field"]').value;
params['datetime'] = document.querySelector('input[id="datetime-field"]').value;
params['album'] = document.querySelector('input[id="album-field"]').value;
params['title'] = document.querySelector('input[id="title-field"]').value;
if(params['location'].length === 0 && params['datetime'].length === 0 && params['album'].length === 0 && params['title'].length === 0) {
return;
}
params['files'] = __process__.files;
broadcast.send('update-photos', params);
};
this.addSuccessImportMessage = function(args) {
document.querySelector('.import-success').innerHTML = 'Your photos were successfully imported. <a href="#" class="launchFinder" data-path="'+args['destination'] +'">View them here</a>.';
};
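// Routes window events to handlers keyed by event type and the target's class names,
// e.g. submitting <form class="importPhotos"> calls this.submit.importPhotos(ev).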
this.dispatch = function(ev) {
var classes = ev.target.className.split(' ');
for(i=0; i<classes.length; i++) {
if(typeof(self[ev.type][classes[i]]) !== 'undefined') {
self[ev.type][classes[i]](ev);
}
}
};
this.removeProgressIcons = function() {
var els = document.querySelectorAll('i.icon-spin');
for(el in els) {
els[el].className = '';
}
};
this.renderPreview = function(files) {
html = '<label>You selected ' + (files.length > 1 ? 'these photos' : 'this photo') + '</label>';
for(var i=0; i<files.length && i<16; i++) {
if(files[i].match(/(mov|mp4|3gp|avi)/i) === null) {
html += '<div class="center-cropped" style="background-image:url(\'file://'+fileUrl(files[i])+'\');" title="'+files[i]+'"></div>';
} else {
html += '<div class="center-cropped video"></div>';
}
}
if(files.length > 16) {
html += '<br>...and ' + (files.length -16) + ' more.';
}
document.querySelector('.preview').innerHTML = html;
};
this.setSuccessTitle = function() {
var el = document.querySelector('.titlebar i').className = 'icon-happy';
};
this.updateStatus = function(response) {
var el = document.querySelector('.status'),
source, destination, html;
console.log('update status');
console.log(response);
if(response.length > 0) {
html = '<label>Status</label><ul>';
for(i=0; i<response.length; i++) {
source = response[i]['source'] || null;
destination = response[i]['destination'] || null;
sourceFileName = source.substr(source.lastIndexOf('/')+1);
if(destination === null) {
html += '<li><i class="icon-unhappy"></i> ' + sourceFileName + '</li>';
} else {
html += '<li><i class="icon-happy"></i> ' + sourceFileName + '<div class="destination" title="'+destination+'">'+destination+'</div></li>';
}
}
html += '</ul>';
el.innerHTML = html;
el.style.display = 'block';
}
};
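// Converts a local filesystem path to a file:// URL,
// e.g. 'C:\photos\a.jpg' becomes 'file:///C:/photos/a.jpg'.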
function fileUrl(str) {
if (typeof str !== 'string') {
throw new Error('Expected a string');
}
var pathName = path.resolve(str).replace(/\\/g, '/');
// Windows drive letter must be prefixed with a slash
if (pathName[0] !== '/') {
pathName = '/' + pathName;
}
return encodeURI('file://' + pathName);
};
}
var handlers = new Handlers();
window.addEventListener('click', handlers.dispatch);
window.addEventListener('submit', handlers.dispatch);
window.addEventListener('change', handlers.dispatch);

71
app/html/location.html Normal file
View File

@ -0,0 +1,71 @@
<html>
<head>
<script src="js/handlers.js"></script>
<link href='https://fonts.googleapis.com/css?family=Lato:400,100,300,100italic,300italic' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="css/bootstrap.css"></script>
<link rel="stylesheet" href="css/boilerplate.css"></script>
<link rel="stylesheet" href="css/styles.css"></script>
<link rel="stylesheet" href="css/fontello/css/animation.css"></script>
<link rel="stylesheet" href="css/fontello/css/elodie.css"></script>
</head>
<body>
<div class="titlebar">
<div class="left">
<!--<a href="" class="left quit quitProgram"><i class="icon-cancel-circle"></i></a>
<a href="" class="left minus minimizeProgram"><i class="icon-minus-circle"></i></a>-->
</div>
How can I help you? <em>-- Elodie</em><i></i>
<a href="index.html" class="right"><i class="icon-media-add"></i></a>
</div>
<form class="updatePhotos" action="" method="post">
<div id="content" class="content">
<div class="location">
<label for="location-field"><i class="icon-map"></i>Change geolocation</label>
<input id="location-field" type="text" placeholder="i.e. Sunnyvale, CA">
</div>
<div class="datetime">
<label for="datetime-field"><i class="icon-calendar"></i>Change date and time</label>
<input id="datetime-field" type="text" placeholder="i.e. 2015-07-31">
</div>
<div class="title">
<label for="title-field"><i class="icon-title"></i>Change title</label>
<input id="title-field" type="text" placeholder="i.e. Elodie smiling at dinner">
</div>
<div class="album">
<label for="album-field"><i class="icon-book"></i>Create album</label>
<input id="album-field" type="text" placeholder="i.e. Elodie's Birthday Party">
<button class="push" type="submit">Update<i></i></button>
<!--<button class="push add-location updatePhotos">Update Photos<i></i></button>
<button class="push add-datetime updatePhotos">Update<i></i></button>
<button class="push add-title updatePhotos">Update<i></i></button>
<button class="push add-album updatePhotos">Update<i></i></button>-->
</div>
<div class="status">
<!--<ul>
<li>IMG_6365.JPG <i class="icon-happy"></i><div class="destination" title="/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg">/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg</div></li>
<li>IMG_1234.JPG <i class="icon-unhappy"></i><div class="destination" title="/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg">/Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg</div></li>
</ul>-->
</div>
<div class="preview">
<!--<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped video"></div>
<div class="center-cropped video"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>
<div class="center-cropped" style="background-image:url('file:///Users/jaisenmathai/Downloads/media/2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365.jpg');"></div>-->
</div>
</div>
</form>
<script>
document.getElementById('location-field').focus();
</script>
</body>
</html>

19
app/index.js Normal file
View File

@ -0,0 +1,19 @@
var ipc = require('ipc'),
toolbarUi = require('./modules/toolbar-ui.js'),
broadcast = require('./modules/broadcast.js');
toolbarUi.app.on('ready', toolbarUi.ready);
toolbarUi.app.on('create-window', toolbarUi.createWindow);
toolbarUi.app.on('after-create-window', toolbarUi.afterCreateWindow);
toolbarUi.app.on('show', toolbarUi.show);
toolbarUi.app.on('after-show', toolbarUi.afterShow);
toolbarUi.app.on('hide', toolbarUi.show);
toolbarUi.app.on('after-hide', toolbarUi.afterHide);
ipc.on('import-photos', broadcast.importPhotos);
ipc.on('update-config', broadcast.updateConfig);
ipc.on('update-photos', broadcast.updatePhotos);
ipc.on('launch-finder', broadcast.launchFinder);
ipc.on('launch-url', broadcast.launchUrl);
ipc.on('program-quit', broadcast.programQuit);
ipc.on('load-update-photos', toolbarUi.onDropFiles);

123
app/modules/broadcast.js Normal file
View File

@ -0,0 +1,123 @@
var exports = module.exports = {};
var path = require('path');
var exec = require('child_process').exec,
config = require('./config.js');
// The main process listens for events from the web renderer.
// When an import is requested, the renderer fires an 'import-photos' ipc event with the source and destination paths.
// Once the main process completes the import it sends an 'update-import-success' event back to the renderer
// so a proper response can be displayed.
exports.importPhotos = function(event, args) {
var params = args,
normalize;
console.log('import-photos');
console.log(args);
if(typeof(args['source']) === 'undefined' || args['source'].length === 0 || typeof(args['destination']) === 'undefined' || args['destination'].length === 0) {
console.log('no source or destination passed in to import-photos');
event.sender.send('update-import-no-photos', null);
return;
}
args['source'] = args['source'].normalize();
args['destination'] = args['destination'].normalize();
update_command = path.normalize(__dirname + '/../../dist/elodie/elodie') + ' import --source="' + args['source'] + '" --destination="' + args['destination'] + '"';
//update_command = __dirname + '/../../elodie.py import --source="' + args['source'] + '" --destination="' + args['destination'] + '"';
console.log(update_command);
exec(update_command, function(error, stdout, stderr) {
console.log('out ' + stdout);
console.log('err ' + stderr);
/*params['error'] = error
params['stdout'] = '[' + stdout.replace(/\n/g,',').replace(/\,+$/g, '').replace(/\n/g,'') + ']'
params['stderr'] = stderr
console.log('parsed')
console.log(params['stdout'])*/
event.sender.send('update-import-success', args);
});
};
exports.updateConfig = function(event, args) {
var params = args,
status;
status = config.writeConfig(params);
if(status) {
event.sender.send('update-config-status', true);
} else {
event.sender.send('update-config-status', false);
}
};
// When photos are dragged onto the toolbar and an update is requested, the renderer fires an 'update-photos' ipc event
// with the list of photos, the type of update and the new value to apply.
// Once the main process completes the update it sends an 'update-photos-success' event back to the renderer
// so a proper response can be displayed.
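// An illustrative payload (field names match what handlers.js sends):
//   { files: ['/path/IMG_6365.JPG'], location: 'Sunnyvale, CA', datetime: '', album: '', title: '' }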
exports.updatePhotos = function(event, args) {
var params = args,
normalize;
console.log('update-photos');
console.log(args);
if(typeof(args['files']) === 'undefined' || args['files'].length === 0) {
console.log('no files passed in to update-photos');
return;
}
normalize = function(files) {
for(var i=0; i<files.length; i++) {
files[i] = files[i].normalize()
}
return files
}
files = normalize(args['files'])
elodie_path = path.normalize(__dirname + '/../../dist/elodie/elodie');
update_command = elodie_path +' update'
//update_command = __dirname + '/../../elodie.py update'
if(args['location'].length > 0) {
update_command += ' --location="' + args['location'] + '"';
}
if(args['album'].length > 0) {
update_command += ' --album="' + args['album'] + '"';
}
if(args['datetime'].length > 0) {
update_command += ' --time="' + args['datetime'] + '"';
}
if(args['title'].length > 0) {
update_command += ' --title="' + args['title'] + '"';
}
update_command += ' "' + files.join('" "') + '"'
console.log(update_command)
exec(update_command, function(error, stdout, stderr) {
console.log('out ' + stdout)
console.log('err ' + stderr)
params['error'] = error
params['stdout'] = '[' + stdout.replace(/\n/g,',').replace(/\,+$/g, '').replace(/\n/g,'') + ']'
params['stderr'] = stderr
console.log('parsed')
console.log(params['stdout'])
event.sender.send('update-photos-success', params);
});
};
exports.launchFinder = function(event, path) {
console.log(path);
var shell = require('shell');
shell.showItemInFolder(path);
};
exports.launchUrl = function(event, url) {
console.log(url);
var shell = require('shell');
shell.openExternal(url);
};
exports.programQuit = function(event, path) {
console.log('program-quit');
//mb.tray.destroy();
mb.quit();
};

38
app/modules/config.js Normal file
View File

@ -0,0 +1,38 @@
var exports = module.exports = {};
var fs = require('fs'),
os = require('os'),
defaultConfigFile = (function() {
var f = __dirname;
for(var i=0; i<2; i++) {
f = f.substr(0, f.lastIndexOf(os.platform() == 'win32' ? '\\' : '/'));
}
return f + (os.platform() == 'win32' ? '\\config.ini-sample': '/config.ini-sample');
})(),
configFile = (process.env.HOME || process.env.USERPROFILE) + (os.platform() == 'win32' ? '\\.elodie\\config.ini' : '/.elodie/config.ini'),
hasConfig,
setConfig;
exports.hasConfig = function() {
console.log(defaultConfigFile);
console.log(configFile);
return fs.existsSync(configFile);
};
exports.writeConfig = function(params) {
var contents;
try {
if(exports.hasConfig()) {
contents = fs.readFileSync(configFile).toString();
} else {
contents = fs.readFileSync(defaultConfigFile).toString();
}
console.log(contents);
contents = contents.replace(/key=[\s\S]+$/,'key='+params['mapQuestKey']);
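// NB: the regex swallows everything from "key=" to end-of-file, so this assumes
// the MapQuest key is the last entry in the config file.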
fs.writeFileSync(configFile, contents);
return true;
} catch(e) {
console.log(e);
return false;
}
};

111
app/modules/toolbar-ui.js Normal file
View File

@ -0,0 +1,111 @@
var exports = module.exports = {};
var menubar = require('menubar'),
menu = require('menu'),
tray = require('tray'),
config = require('./config.js'),
loadUrl = null;
var os = require('os')
var s_dir = __dirname.substr(0,__dirname.lastIndexOf(os.platform() == 'win32' ? '\\' : '/')) +
(os.platform() == 'win32' ? '\\html' : '/html');
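// i.e. s_dir resolves to the sibling app/html directory that holds the renderer pages.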
exports.app = app = menubar(
{
preloadWindow: true,
dir: s_dir,
index: 'index.html',
pages: {
'blank': 'blank.html',
'config': 'config.html',
'location': 'location.html'
},
width: 400,
height: 500,
'window-position': 'trayCenter',
'frame': os.platform() == 'win32' ? true : false,
'always-on-top': os.platform() == 'win32' ? true : false
}
);
exports.ready = function() {
console.log('app is ready');
var template = [{
label: "Application",
submenu: [
{ label: "Quit", accelerator: "Command+Q", click: function() { app.quit(); }}
]}, {
label: "Edit",
submenu: [
{ label: "Undo", accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: "Redo", accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ label: "Cut", accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: "Copy", accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: "Paste", accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: "Select All", accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]}
];
menu.setApplicationMenu(menu.buildFromTemplate(template));
this.tray.setToolTip('Drag and drop files here');
console.log(app.getOption('dir'));
this.tray.setImage(app.getOption('dir') + '/img/logo@18x22xbw.png');
this.tray.on('clicked', function clicked () {
console.log('tray-clicked')
});
this.tray.on('drop-files', function dropFiles (ev, files) {
loadUrl = app.getOption('pages')['location'];
app.showWindow();
//app.window.openDevTools();
app.window.webContents.on('did-finish-load', function() {
app.window.webContents.send('files', files);
app.window.webContents.send('preview', files);
});
});
};
exports.onDropFiles = function(event, args) {
var files = args;
loadUrl = app.getOption('pages')['location'];
app.showWindow();
app.window.webContents.on('did-finish-load', function() {
app.window.webContents.send('files', files);
app.window.webContents.send('preview', files);
});
};
exports.createWindow = function() {
console.log('create-window')
};
exports.afterCreateWindow = function() {
console.log('after-create-window')
};
exports.show = function() {
if(!config.hasConfig()) {
loadUrl = this.getOption('pages')['config'];
} else if(loadUrl === null) {
loadUrl = this.getOption('index');
}
this.window.loadUrl('file://' + this.getOption('dir') + '/' + loadUrl);
loadUrl = null;
//app.window.openDevTools();
};
exports.afterShow = function() {
console.log('after-show');
};
exports.hide = function() {
console.log('hide');
};
exports.afterHide = function() {
console.log('after-hide')
this.window.loadUrl('file://' + this.getOption('dir') + '/' + this.getOption('pages')['blank']);
};

BIN
creative/logo@300x.png Normal file

Binary file not shown.

Size: 99 KiB

Binary file not shown.

Size: 37 KiB

33
elodie.spec Normal file
View File

@ -0,0 +1,33 @@
# -*- mode: python -*-
block_cipher = None
a = Analysis(['elodie.py'],
pathex=['/Users/jaisenmathai/dev/tools/elodie'],
binaries=None,
datas=[('configs/ExifTool_config', 'configs')],
hiddenimports=[],
hookspath=None,
runtime_hooks=None,
excludes=None,
win_no_prefer_redirects=None,
win_private_assemblies=None,
cipher=block_cipher)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
exclude_binaries=True,
name='elodie',
debug=False,
strip=None,
upx=True,
console=True )
coll = COLLECT(exe,
a.binaries,
a.zipfiles,
a.datas,
strip=None,
upx=True,
name='elodie')
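# For reference (standard PyInstaller usage, run from the repo root):
#   pyinstaller elodie.spec
# produces dist/elodie/elodie, the binary that app/modules/broadcast.js shells out to.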

View File

@ -1,23 +1,3 @@
[Exif]
#album_from_folder=False
fill_date_original=True
#cache=True
#ignore_tags=None
use_date_filename=True
#use_file_dates=False
[Filters]
exclude=["**/.directory", "**/.DS_Store"]
#extensions=None
#glob=**/*
#max_deep=None
remove_duplicates=True
[Geolocation]
geocoder=Nominatim
prefer_english_names=False
timeout=1
[Path]
# day_begins: what hour of the day you want the day to begin (only for
# classification purposes). Defaults to 0 (midnight). Can be
@ -25,11 +5,13 @@ timeout=1
# be a number between 0-23')
day_begins=4
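# e.g. with day_begins=4, a photo taken at 03:30 is filed under the previous day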
# Path format
dirs_path=<%Y>/<%m-%b>_<location>_<folder>
name=<%Y%m%d-%H%M%S>_<<name>.%l<ext>|<original_name>>
# name=<%Y%m%d-%H%M%S>-%u<original_name>.%l<ext>
dirs_path={%Y}/{%m-%b}-{city}-{folder}
name={%Y%m%d-%H%M%S}-%u{original_name}.%l{ext}
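# e.g. (illustrative, assuming %u/%l upper/lower-case the next token) a photo
# img_6365.jpg taken 2015-10-17 01:03:50 in Sunnyvale, sorted from folder "Birthday",
# would land at 2015/10-Oct-Sunnyvale-Birthday/20151017-010350-IMG_6365.jpg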
[Terminal]
dry_run=False
interactive=False
[Exclusions]
name1=.directory
name2=.DS_Store
[Geolocation]
geocoder=Nominatim
prefer_english_names=False

286
ordigi.py Executable file
View File

@ -0,0 +1,286 @@
#!/usr/bin/env python
import os
import re
import sys
from datetime import datetime
import click
from ordigi import config
from ordigi import constants
from ordigi import log
from ordigi.database import Db
from ordigi.filesystem import FileSystem
from ordigi.media import Media, get_all_subclasses
from ordigi.summary import Summary
FILESYSTEM = FileSystem()
def print_help(command):
click.echo(command.get_help(click.Context(command)))
@click.command('batch')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
def _batch(debug):
"""Run batch() for all plugins.
"""
constants.debug = debug
plugins = Plugins()
plugins.run_batch()
@click.command('sort')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
@click.option('--dry-run', default=False, is_flag=True,
help='Dry run only, no change made to the filesystem.')
@click.option('--destination', '-d', type=click.Path(file_okay=False),
default=None, help='Sort files into this directory.')
@click.option('--clean', '-C', default=False, is_flag=True,
help='Clean empty folders')
@click.option('--copy', '-c', default=False, is_flag=True,
help='True if you want files to be copied over from src_dir to\
dest_dir rather than moved')
@click.option('--exclude-regex', '-e', default=set(), multiple=True,
help='Regular expression for directories or files to exclude.')
@click.option('--filter-by-ext', '-f', default=set(), multiple=True, help='''Use filename
extension to filter files for sorting. If value is '*', use
common media file extension for filtering. Ignored files remain in
the same directory structure''' )
@click.option('--ignore-tags', '-i', default=set(), multiple=True,
help='Specific tags or group that will be ignored when\
searching for file data. Example \'File:FileModifyDate\' or \'Filename\'' )
@click.option('--max-deep', '-m', default=None,
help='Maximum level to proceed. Number from 0 to desired level.')
@click.option('--remove-duplicates', '-r', default=False, is_flag=True,
help='True to remove files that are exactly the same in name\
and file hash')
@click.option('--reset-cache', '-R', default=False, is_flag=True,
help='Regenerate the hash.json and location.json database ')
@click.option('--verbose', '-v', default=False, is_flag=True,
help='True if you want to see details of file processing')
@click.argument('paths', required=True, nargs=-1, type=click.Path())
def _sort(debug, dry_run, destination, clean, copy, exclude_regex, filter_by_ext, ignore_tags,
max_deep, remove_duplicates, reset_cache, verbose, paths):
"""Sort files or directories by reading their EXIF and organizing them
according to ordigi.conf preferences.
"""
if copy:
mode = 'copy'
else:
mode = 'move'
logger = log.get_logger(verbose, debug)
if max_deep is not None:
max_deep = int(max_deep)
cache = True
if reset_cache:
cache = False
if not destination and paths:
destination = paths[-1]
paths = paths[0:-1]
elif not destination:
sys.exit(1)
paths = set(paths)
filter_by_ext = set(filter_by_ext)
destination = os.path.abspath(os.path.expanduser(destination))
if not os.path.exists(destination):
logger.error(f'Directory {destination} does not exist')
conf = config.load_config(constants.CONFIG_FILE)
path_format = config.get_path_definition(conf)
# if no exclude list was passed in we check if there's a config
if len(exclude_regex) == 0:
if 'Exclusions' in conf:
exclude_regex = [value for key, value in conf.items('Exclusions')]
exclude_regex_list = set(exclude_regex)
# Initialize Db
db = Db(destination)
if 'Path' in conf and 'day_begins' in conf['Path']:
config_directory = conf['Path']
day_begins = int(config_directory['day_begins'])
else:
day_begins = 0
filesystem = FileSystem(cache, day_begins, dry_run, exclude_regex_list,
filter_by_ext, logger, max_deep, mode, path_format)
summary, has_errors = filesystem.sort_files(paths, destination, db,
remove_duplicates, ignore_tags)
if clean:
remove_empty_folders(destination, logger)
if verbose or debug:
summary.write()
if has_errors:
sys.exit(1)
def remove_empty_folders(path, logger, remove_root=True):
"""Remove empty folders recursively."""
if not os.path.isdir(path):
return
# remove empty subfolders
files = os.listdir(path)
if len(files):
for f in files:
fullpath = os.path.join(path, f)
if os.path.isdir(fullpath):
remove_empty_folders(fullpath, logger)
# if folder empty, delete it
files = os.listdir(path)
if len(files) == 0 and remove_root:
logger.info(f"Removing empty folder: {path}")
os.rmdir(path)
@click.command('clean')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
@click.option('--verbose', '-v', default=False, is_flag=True,
help='True if you want to see details of file processing')
@click.argument('path', required=True, nargs=1, type=click.Path())
def _clean(debug, verbose, path):
"""Remove empty folders
Usage: clean [--verbose|--debug] directory [removeRoot]"""
logger = log.get_logger(verbose, debug)
remove_empty_folders(path, logger)
@click.command('generate-db')
@click.option('--path', type=click.Path(file_okay=False),
required=True, help='Path of your photo library.')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
def _generate_db(path, debug):
"""Regenerate the hash.json database which contains all of the sha256 signatures of media files.
"""
constants.debug = debug
result = Result()
path = os.path.abspath(os.path.expanduser(path))
if not os.path.isdir(path):
log.error('path is not a valid directory %s' % path)
sys.exit(1)
db = Db(path)
db.backup_hash_db()
db.reset_hash_db()
for current_file in FILESYSTEM.get_all_files(path):
result.append((current_file, True))
db.add_hash(db.checksum(current_file), current_file)
log.progress()
db.update_hash_db()
log.progress('', True)
result.write()
@click.command('verify')
@click.option('--path', type=click.Path(file_okay=False),
required=True, help='Path of your photo library.')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
def _verify(path, debug):
constants.debug = debug
result = Result()
db = Db(path)
for checksum, file_path in db.all():
if not os.path.isfile(file_path):
result.append((file_path, False))
log.progress('x')
continue
actual_checksum = db.checksum(file_path)
if checksum == actual_checksum:
result.append((file_path, True))
log.progress()
else:
result.append((file_path, False))
log.progress('x')
log.progress('', True)
result.write()
@click.command('compare')
@click.option('--debug', default=False, is_flag=True,
help='Override the value in constants.py with True.')
@click.option('--dry-run', default=False, is_flag=True,
help='Dry run only, no change made to the filesystem.')
@click.option('--find-duplicates', '-f', default=False, is_flag=True)
@click.option('--output-dir', '-o', default=False, is_flag=True, help='Output\
directory')
@click.option('--remove-duplicates', '-r', default=False, is_flag=True)
@click.option('--revert-compare', '-R', default=False, is_flag=True, help='Revert\
compare')
@click.option('--similar-to', '-s', default=False, help='Similar to given\
image')
@click.option('--similarity', '-S', default=80, help='Similarity level for\
images')
@click.option('--verbose', '-v', default=False, is_flag=True,
help='True if you want to see details of file processing')
@click.argument('path', nargs=1, required=True)
def _compare(debug, dry_run, find_duplicates, output_dir, remove_duplicates,
revert_compare, similar_to, similarity, verbose, path):
'''Compare files in directories'''
logger = log.get_logger(verbose, debug)
# Initialize Db
db = Db(path)
filesystem = FileSystem(mode='move', dry_run=dry_run, logger=logger)
if revert_compare:
summary, has_errors = filesystem.revert_compare(path, db, dry_run)
else:
summary, has_errors = filesystem.sort_similar_images(path, db,
similarity)
if verbose or debug:
summary.write()
if has_errors:
sys.exit(1)
@click.group()
def main():
pass
main.add_command(_clean)
main.add_command(_compare)
main.add_command(_sort)
main.add_command(_generate_db)
main.add_command(_verify)
main.add_command(_batch)
if __name__ == '__main__':
main()
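# Example invocation (illustrative): copy files from ~/import into ~/collection,
# then remove any empty folders left behind:
#   ./ordigi.py sort --copy --clean ~/import ~/collection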

View File

@ -1,3 +0,0 @@
from ordigi import log
LOG = log.get_logger('ordigi')

View File

@ -1,628 +0,0 @@
#!/usr/bin/env python
from pathlib import Path
import sys
import click
from ordigi import log, LOG
from ordigi.collection import Collection
from ordigi import constants
from ordigi.geolocation import GeoLocation
from ordigi import utils
_logger_options = [
click.option(
'--quiet',
'-q',
default=False,
is_flag=True,
help='Log level set to ERROR',
),
click.option(
'--verbose',
'-v',
default=False,
is_flag=True,
help='Log level set to INFO',
),
click.option(
'--debug',
'-d',
default=False,
is_flag=True,
help='Log level set to DEBUG',
),
]
_input_options = [
click.option(
'--interactive', '-i', default=False, is_flag=True, help="Interactive mode"
),
]
_dry_run_options = [
click.option(
'--dry-run',
default=False,
is_flag=True,
help='Dry run only, no change made to the filesystem.',
),
]
_exclude_options = [
click.option(
'--exclude',
'-E',
default=None,
multiple=True,
help='Directories or files to exclude.',
),
]
_filter_options = [
click.option(
'--ext',
'-e',
default=None,
multiple=True,
help="""Use filename
extension to filter files for sorting. If value is '*', use
common media file extension for filtering. Ignored files remain in
the same directory structure""",
),
click.option(
'--ignore-tags',
'-I',
default=None,
multiple=True,
help='Specific tags or group that will be ignored when\
searching for file data. Example \'File:FileModifyDate\' or \'Filename\'',
),
click.option('--glob', '-g', default='**/*', help='Glob file selection'),
]
_sort_options = [
click.option(
'--album-from-folder',
'-a',
default=False,
is_flag=True,
help="Use images' folders as their album names.",
),
click.option(
'--fill-date-original',
'-O',
default=False,
is_flag=True,
help="Fill date original from date media if not set",
),
click.option(
'--path-format',
'-p',
default=constants.DEFAULT_PATH_FORMAT,
help='Custom featured path format',
),
click.option(
'--remove-duplicates',
'-R',
default=False,
is_flag=True,
help='True to remove files that are exactly the same in name\
and file hash',
),
click.option(
'--use-date-filename',
'-f',
default=False,
is_flag=True,
help="Use filename date for media original date.",
),
click.option(
'--use-file-dates',
'-F',
default=False,
is_flag=True,
help="Use file date created or modified for media original date.",
),
]
def print_help(command):
click.echo(command.get_help(click.Context(command)))
def add_options(options):
def _add_options(func):
for option in reversed(options):
func = option(func)
return func
return _add_options
def _get_paths(paths, root):
root = Path(root).expanduser().absolute()
if not paths:
absolute_paths = {root}
else:
absolute_paths = set()
for path in paths:
absolute_paths.add(Path(path).expanduser().absolute())
return absolute_paths, root
def _cli_get_location(collection):
gopt = collection.opt['Geolocation']
return GeoLocation(
gopt['geocoder'],
gopt['prefer_english_names'],
gopt['timeout'],
)
def _cli_sort(collection, src_paths, import_mode):
loc = _cli_get_location(collection)
return collection.sort_files(src_paths, loc, import_mode)
@click.group()
def cli(**kwargs):
pass
@cli.command('check')
@add_options(_logger_options)
@click.argument('path', required=True, nargs=1, type=click.Path())
def _check(**kwargs):
"""
Check media collection.
"""
root = Path(kwargs['path']).expanduser().absolute()
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
collection = Collection(root)
result = collection.check_db()
if result:
summary = collection.check_files()
if log_level < 30:
summary.print()
if summary.errors:
LOG.error('Db data is not accurate; run `ordigi update --checksum`')
sys.exit(1)
else:
LOG.error('Db data is not accurate; run `ordigi update`')
sys.exit(1)
@cli.command('clean')
@add_options(_logger_options)
@add_options(_dry_run_options)
@add_options(_filter_options)
@click.option(
'--dedup-regex',
'-D',
default=None,
multiple=True,
help='Regex to match duplicate strings parts',
)
@click.option(
'--delete-excluded', '-d', default=False, is_flag=True, help='Remove excluded files'
)
@click.option(
'--folders', '-f', default=False, is_flag=True, help='Remove empty folders'
)
@click.option(
'--path-string', '-p', default=False, is_flag=True, help='Deduplicate path string'
)
@click.option(
'--remove-duplicates',
'-R',
default=False,
is_flag=True,
help='True to remove files that are exactly the same in name and file hash',
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('collection', required=True, nargs=1, type=click.Path())
def _clean(**kwargs):
"""Clean media collection"""
folders = kwargs['folders']
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
subdirs = kwargs['subdirs']
root = kwargs['collection']
paths, root = _get_paths(subdirs, root)
collection = Collection(
root,
{
'dry_run': kwargs['dry_run'],
'extensions': kwargs['ext'],
'glob': kwargs['glob'],
'remove_duplicates': kwargs['remove_duplicates'],
},
)
# os.path.join(
# TODO make function to remove duplicates
# path_format = collection.opt['Path']['path_format']
# summary = collection.sort_files(paths, None)
if kwargs['path_string']:
dedup_regex = set(kwargs['dedup_regex'])
collection.dedup_path(paths, dedup_regex)
for path in paths:
if folders:
collection.remove_empty_folders(path)
if kwargs['delete_excluded']:
collection.remove_excluded_files()
summary = collection.summary
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('clone')
@add_options(_logger_options)
@add_options(_dry_run_options)
@click.argument('src', required=True, nargs=1, type=click.Path())
@click.argument('dest', required=True, nargs=1, type=click.Path())
def _clone(**kwargs):
"""Clone media collection to another location"""
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
src_path = Path(kwargs['src']).expanduser().absolute()
dest_path = Path(kwargs['dest']).expanduser().absolute()
dry_run = kwargs['dry_run']
src_collection = Collection(
src_path, {'cache': True, 'dry_run': dry_run}
)
if dest_path.exists() and not utils.empty_dir(dest_path):
LOG.error(f'Destination collection path {dest_path} must be empty directory')
sys.exit(1)
summary = src_collection.clone(dest_path)
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('compare')
@add_options(_logger_options)
@add_options(_dry_run_options)
@add_options(_filter_options)
@click.option('--find-duplicates', '-f', default=False, is_flag=True)
@click.option('--remove-duplicates', '-r', default=False, is_flag=True)
@click.option(
'--similar-to',
'-s',
default=False,
help='Similar to given image',
)
@click.option(
'--similarity',
'-S',
default=80,
help='Similarity level for images',
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('collection', required=True, nargs=1, type=click.Path())
def _compare(**kwargs):
"""
Sort similar images in directories
"""
subdirs = kwargs['subdirs']
root = kwargs['collection']
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
paths, root = _get_paths(subdirs, root)
collection = Collection(
root,
{
'extensions': kwargs['ext'],
'glob': kwargs['glob'],
'dry_run': kwargs['dry_run'],
'remove_duplicates': kwargs['remove_duplicates'],
},
)
for path in paths:
collection.sort_similar_images(path, kwargs['similarity'])
summary = collection.summary
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('edit')
@add_options(_logger_options)
@add_options(_exclude_options)
@add_options(_filter_options)
@click.option(
'--key',
'-k',
default=None,
multiple=True,
help="Select exif tags groups to edit",
)
@click.option(
'--overwrite',
'-O',
default=False,
is_flag=True,
help="Overwrite db and exif value by key value",
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('path', required=True, nargs=1, type=click.Path())
def _edit(**kwargs):
"""Edit EXIF metadata in files or directories"""
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
paths, root = _get_paths(kwargs['subdirs'], kwargs['path'])
overwrite = kwargs['overwrite']
collection = Collection(
root,
{
'cache': True,
'ignore_tags': kwargs['ignore_tags'],
'exclude': kwargs['exclude'],
'extensions': kwargs['ext'],
'glob': kwargs['glob'],
}
)
editable_keys = (
'album',
'camera_make',
'camera_model',
'city',
'country',
# 'date_created',
'date_media',
# 'date_modified',
'date_original',
'latitude',
'location',
'longitude',
'latitude_ref',
'longitude_ref',
'original_name',
'state',
'title',
)
if not kwargs['key']:
keys = set(editable_keys)
else:
keys = set(kwargs['key'])
if 'coordinates' in keys:
keys.remove('coordinates')
keys.update(['latitude', 'longitude'])
location = False
for key in keys:
if key not in editable_keys:
LOG.error(f"key '{key}' is not valid")
sys.exit(1)
if key in (
'city',
'latitude',
'location',
'longitude',
'latitude_ref',
'longitude_ref',
):
location = True
if location:
loc = _cli_get_location(collection)
else:
loc = None
summary = collection.edit_metadata(paths, keys, loc, overwrite)
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('init')
@add_options(_logger_options)
@click.argument('path', required=True, nargs=1, type=click.Path())
def _init(**kwargs):
"""
Init media collection database.
"""
root = Path(kwargs['path']).expanduser().absolute()
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
collection = Collection(root)
loc = _cli_get_location(collection)
summary = collection.init(loc)
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('import')
@add_options(_logger_options)
@add_options(_input_options)
@add_options(_dry_run_options)
@add_options(_exclude_options)
@add_options(_filter_options)
@add_options(_sort_options)
@click.option(
'--copy',
'-c',
default=False,
is_flag=True,
help='True if you want files to be copied over from src_dir to\
dest_dir rather than moved',
)
@click.argument('src', required=False, nargs=-1, type=click.Path())
@click.argument('dest', required=True, nargs=1, type=click.Path())
def _import(**kwargs):
"""Sort files or directories by reading their EXIF and organizing them
according to ordigi.conf preferences.
"""
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
src_paths, root = _get_paths(kwargs['src'], kwargs['dest'])
collection = Collection(
root,
{
'album_from_folder': kwargs['album_from_folder'],
'cache': False,
'ignore_tags': kwargs['ignore_tags'],
'use_date_filename': kwargs['use_date_filename'],
'use_file_dates': kwargs['use_file_dates'],
'exclude': kwargs['exclude'],
'extensions': kwargs['ext'],
'glob': kwargs['glob'],
'dry_run': kwargs['dry_run'],
'interactive': kwargs['interactive'],
'path_format': kwargs['path_format'],
'remove_duplicates': kwargs['remove_duplicates'],
}
)
if kwargs['copy']:
import_mode = 'copy'
else:
import_mode = 'move'
summary = _cli_sort(collection, src_paths, import_mode)
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('sort')
@add_options(_logger_options)
@add_options(_input_options)
@add_options(_dry_run_options)
@add_options(_filter_options)
@add_options(_sort_options)
@click.option('--clean', '-C', default=False, is_flag=True, help='Clean empty folders')
@click.option(
'--reset-cache',
'-r',
default=False,
is_flag=True,
help='Regenerate the hash.json and location.json database ',
)
@click.argument('subdirs', required=False, nargs=-1, type=click.Path())
@click.argument('dest', required=True, nargs=1, type=click.Path())
def _sort(**kwargs):
"""Sort files or directories by reading their EXIF and organizing them
according to ordigi.conf preferences.
"""
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
paths, root = _get_paths(kwargs['subdirs'], kwargs['dest'])
cache = not kwargs['reset_cache']
collection = Collection(
root,
{
'album_from_folder': kwargs['album_from_folder'],
'cache': cache,
'fill_date_original': kwargs['fill_date_original'],
'ignore_tags': kwargs['ignore_tags'],
'use_date_filename': kwargs['use_date_filename'],
'use_file_dates': kwargs['use_file_dates'],
'extensions': kwargs['ext'],
'glob': kwargs['glob'],
'dry_run': kwargs['dry_run'],
'interactive': kwargs['interactive'],
'remove_duplicates': kwargs['remove_duplicates'],
}
)
summary = _cli_sort(collection, paths, False)
if kwargs['clean']:
collection.remove_empty_folders(root)
if log_level < 30:
summary.print()
if summary.errors:
sys.exit(1)
@cli.command('update')
@add_options(_logger_options)
@click.option(
'--checksum',
'-c',
default=False,
is_flag=True,
help='Update checksums, assuming files were changed by the user',
)
@click.argument('path', required=True, nargs=1, type=click.Path())
def _update(**kwargs):
"""
Update media collection database.
"""
root = Path(kwargs['path']).expanduser().absolute()
log_level = log.get_level(kwargs['quiet'], kwargs['verbose'], kwargs['debug'])
log.console(LOG, level=log_level)
collection = Collection(root)
loc = _cli_get_location(collection)
summary = collection.update(loc, kwargs['checksum'])
if log_level < 30:
summary.print()
if __name__ == '__main__':
cli()

File diff suppressed because it is too large

View File

@ -1,196 +1,47 @@
import json
import re
"""Load config file as a singleton."""
from configparser import RawConfigParser
from os import path
from ordigi import constants
from geopy.geocoders import options as gopt
def check_option(getoption):
"""Check option type int or boolean"""
try:
getoption
except ValueError as e:
# TODO
return None
else:
return getoption
def write(conf_file, config):
with open(conf_file, 'w') as conf_file:
config.write(conf_file)
return True
return False
def check_json(getoption):
"""Check if json string is valid"""
try:
getoption
except json.JSONDecodeError as e:
# TODO
return None
else:
return getoption
def load_config(file):
if not path.exists(file):
return {}
config = RawConfigParser()
config.read(file)
return config
def check_re(getoption):
"""Check if regex string is valid"""
try:
getoption
except re.error as e:
# TODO
return None
else:
return getoption
def get_path_definition(config):
"""Returns a list of folder definitions.
Each element in the list represents a folder.
Fallback folders are supported and are nested lists.
class Config:
"""Manage config file"""
:returns: string
"""
def __init__(self, conf_path=constants.CONFIG_FILE, conf=None):
self.conf_path = conf_path
if conf is None:
self.conf = self.load_config()
if self.conf == {}:
# Fallback to default config
self.conf_path = constants.CONFIG_FILE
self.conf = self.load_config()
else:
self.conf = conf
if 'Path' in config:
if 'format' in config['Path']:
return config['Path']['format']
elif 'dirs_path' in config['Path'] and 'name' in config['Path']:
return config['Path']['dirs_path'] + '/' + config['Path']['name']
self.options = self.get_default_options()
return constants.default_path + '/' + constants.default_name
def get_default_options(self) -> dict:
# Initialize with default options
return {
'Exif': {
'album_from_folder': False,
'fill_date_original': False,
'cache': True,
'ignore_tags': None,
'use_date_filename': False,
'use_file_dates': False,
},
'Filters': {
'exclude': set(),
'extensions': None,
'glob': '**/*',
'max_deep': None,
'remove_duplicates': False,
},
'Geolocation': {
'geocoder': constants.DEFAULT_GEOCODER,
'prefer_english_names': False,
'timeout': gopt.default_timeout,
},
'Path': {
'day_begins': 0,
'path_format': constants.DEFAULT_PATH_FORMAT,
},
'Terminal': {
'dry_run': False,
'interactive': False,
},
}
def get_geocoder():
config = load_config(constants.CONFIG_FILE)
if 'Geolocation' in config and 'geocoder' in config['Geolocation']:
geocoder = config['Geolocation']['geocoder']
if geocoder in ('Nominatim', ):
return geocoder
def write(self, conf):
with open(self.conf_path, 'w') as conf_file:
conf.write(conf_file)
return True
return constants.default_geocoder
return False
def load_config(self):
if not self.conf_path.exists():
return {}
conf = RawConfigParser()
conf.read(self.conf_path)
return conf
def is_option(self, section, option):
"""Get ConfigParser options"""
if section in self.conf and option in self.conf[section]:
return True
return False
@check_option
def _getboolean(self, section, option):
return self.conf.getboolean(section, option)
getboolean = check_option(_getboolean)
@check_option
def _getint(self, section, option):
return self.conf.getint(section, option)
getint = check_option(_getint)
@check_json
def _getjson(self, section, option):
return json.loads(self.conf.get(section, option))
getjson = check_json(_getjson)
@check_re
def _getre(self, section, option):
return re.compile(self.conf.get(section, option))
getre = check_re(_getre)
def get_config_option(self, section, option):
bool_options = {
'album_from_folder',
'fill_date_original',
'cache',
'dry_run',
'interactive',
'prefer_english_names',
'remove_duplicates',
'use_date_filename',
'use_file_dates',
}
int_options = {
'day_begins',
'max_deep',
'timeout',
}
string_options = {
'glob',
'geocoder',
}
multi_options = {
'exclude',
'extensions',
'ignore_tags',
}
value = self.options[section][option]
if self.is_option(section, option):
if option in bool_options:
return self.getboolean(section, option)
if option in int_options:
return self.getint(section, option)
if option == 'geocoder' and value in ('Nominatim',):
return self.conf[section][option]
if option == 'glob':
return self.getre(section, option)
if option == 'path_format':
return self.conf[section][option]
if option in multi_options:
return set(self.getjson(section, option))
return value
if option == 'path_format':
if self.is_option('Path', 'name') and self.is_option('Path', 'dirs_path'):
# Path format is split in two parts
value = self.conf['Path']['dirs_path'] + '/' + self.conf['Path']['name']
return value
def get_config_options(self) -> dict:
"""Get config options"""
for section in self.options:
for option in self.options[section]:
# Option is in section
value = self.get_config_option(section, option)
self.options[section][option] = value
return self.options
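# Typical use (illustrative):
#   options = Config().get_config_options()
#   options['Geolocation']['geocoder']  # 'Nominatim' unless overridden in ordigi.conf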

View File

@ -2,30 +2,41 @@
Settings.
"""
from os import environ
from pathlib import Path
from os import environ, path
from sys import version_info
#: If True, debug messages will be printed.
debug = False
#Ordigi settings directory.
if 'XDG_CONFIG_HOME' in environ:
confighome = environ['XDG_CONFIG_HOME']
elif 'APPDATA' in environ:
confighome = environ['APPDATA']
else:
confighome = path.join(environ['HOME'], '.config')
application_directory = path.join(confighome, 'ordigi')
# Ordigi settings directory.
def get_config_dir(name):
if 'XDG_CONFIG_HOME' in environ:
confighome = Path(environ['XDG_CONFIG_HOME'])
elif 'APPDATA' in environ:
confighome = Path(environ['APPDATA'])
else:
confighome = Path(environ['HOME'], '.config')
default_path = '{%Y-%m-%b}/{album}|{city}|{"Unknown Location"}'
default_name = '{%Y-%m-%d_%H-%M-%S}-{name}-{title}.%l{ext}'
default_geocoder = 'Nominatim'
# Checksum storage file.
hash_db = 'hash.json'
# TODO: will be removed eventually
# hash_db = '{}/hash.json'.format(application_directory)
return confighome / name
# Geolocation details file.
location_db = 'location.json'
# TODO: will be removed eventually
# location_db = '{}/location.json'.format(application_directory)
# Ordigi installation directory.
script_directory = path.dirname(path.dirname(path.abspath(__file__)))
APPLICATION_DIRECTORY = get_config_dir('ordigi')
#: Accepted language in responses from MapQuest
accepted_language = 'en'
DEFAULT_PATH = '<%Y-%m-%b>/<album>|<city>'
DEFAULT_NAME = '<%Y-%m-%d_%H-%M-%S>-<name>-<title>.%l<ext>'
DEFAULT_PATH_FORMAT = DEFAULT_PATH + '/' + DEFAULT_NAME
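# e.g. (illustrative; '|' marks a fallback: album if set, otherwise city)
# DEFAULT_PATH_FORMAT could expand to
# "2015-10-Oct/Sunnyvale/2015-10-17_01-03-50-img_6365-dinner.jpg"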
DEFAULT_GEOCODER = 'Nominatim'
# check python version, required in filesystem.py to trigger appropriate method
python_version = version_info.major
CONFIG_FILE = APPLICATION_DIRECTORY / 'ordigi.conf'
CONFIG_FILE = f'{application_directory}/ordigi.conf'

View File

@ -1,350 +1,194 @@
from datetime import datetime
"""
Methods for interacting with database files
"""
from builtins import map
from builtins import object
import json
import os
from pathlib import Path
import sqlite3
import sys
from ordigi import LOG
from ordigi.utils import distance_between_two_points
from math import radians, cos, sqrt
from shutil import copyfile
from time import strftime
from ordigi import constants
class Sqlite:
"""Methods for interacting with Sqlite database"""
class Db(object):
"""A class for interacting with the JSON files database."""
def __init__(self, target_dir):
# Create dir for target database
db_dir = Path(target_dir, '.ordigi')
dirname = os.path.join(target_dir, '.ordigi')
if not db_dir.exists():
if not os.path.exists(dirname):
try:
db_dir.mkdir()
os.makedirs(dirname)
except OSError:
pass
self.db_type = 'SQLite format 3'
self.log = LOG.getChild(self.__class__.__name__)
self.types = {'text': (str, datetime), 'integer': (int,), 'real': (float,)}
# self.hash_db = constants.hash_db
self.hash_db_file = os.path.join(dirname, constants.hash_db)
self.check_db(self.hash_db_file)
self.filename = Path(db_dir, 'collection.db')
self.con = sqlite3.connect(self.filename)
# Allow selecting column by name
self.con.row_factory = sqlite3.Row
self.cur = self.con.cursor()
self.hash_db = {}
metadata_header = {
'FilePath': 'text not null',
'Checksum': 'text',
'Album': 'text',
'Title': 'text',
'LocationId': 'integer',
'DateMedia': 'text',
'DateOriginal': 'text',
'DateCreated': 'text',
'DateModified': 'text',
'FileModifyDate': 'text',
'CameraMake': 'text',
'CameraModel': 'text',
'OriginalName': 'text',
'SrcDir': 'text',
'Subdirs': 'text',
'Filename': 'text',
}
# We know from above that this file exists so we open it
# for reading only.
with open(self.hash_db_file, 'r') as f:
try:
self.hash_db = json.load(f)
except ValueError:
pass
location_header = {
'Latitude': 'real not null',
'Longitude': 'real not null',
'LatitudeRef': 'text',
'LongitudeRef': 'text',
'City': 'text',
'State': 'text',
'Country': 'text',
'Location': 'text',
}
# self.location_db_file = constants.location_db
self.location_db_file = os.path.join(dirname, constants.location_db)
self.check_db(self.location_db_file)
self.tables = {
'metadata': {'header': metadata_header},
'location': {'header': location_header},
}
self.location_db = []
# Create tables
for table, d in self.tables.items():
if not self.is_table(table):
if table == 'metadata':
# https://www.quackit.com/sqlite/tutorial/create_a_relationship.cfm
self.create_table(
table, d['header'],
(
"unique('FilePath')",
"foreign key(LocationId) references location(Id)",
),
)
elif table == 'location':
self.create_table(
table, d['header'],
("unique('Latitude', 'Longitude')",),
)
# We know from above that this file exists so we open it
# for reading only.
with open(self.location_db_file, 'r') as f:
try:
self.location_db = json.load(f)
except ValueError:
pass
def is_Sqlite3(self, filename):
if not os.path.isfile(filename):
return False
if os.path.getsize(filename) < 100: # SQLite database file header is 100 bytes
return False
def check_db(self, db_file):
'''Load db from file'''
# If the hash db doesn't exist we create it.
# Otherwise we only open for reading
if not os.path.isfile(db_file):
with open(db_file, 'a'):
os.utime(db_file, None)
with open(filename, 'rb') as fd:
header = fd.read(100)
def add_hash(self, key, value, write=False):
"""Add a hash to the hash db.
return header[:16] == (self.db_type + '\x00').encode()
def is_table(self, table):
"""Check if table exist"""
try:
# get the count of tables with the name
self.cur.execute(
f"select count(name) from sqlite_master where type='table' and name='{table}'"
)
except sqlite3.DatabaseError as e:
# raise type(e)(e.message + ' :{self.filename} %s' % arg1)
raise sqlite3.DatabaseError(f"{self.filename} is not a valid database")
# if the count is 1, then table exists
if self.cur.fetchone()[0] == 1:
return True
return False
def get_rows(self, table):
"""Cycle through rows in table
:params: str
:return: iter
:param str key:
:param str value:
:param bool write: If true, write the hash db to disk.
"""
self.cur.execute(f'select * from {table}')
for row in self.cur:
yield row
self.hash_db[key] = value
if(write is True):
self.update_hash_db()
def is_empty(self, table):
if [x for x in self.get_rows(table)] == []:
return True
# Location database
# Currently quite simple just a list of long/lat pairs with a name
# If it gets many entries a lookup might take too long and a better
# structure might be needed. Some speed up ideas:
# - Sort it and use an interval-halving (binary) search
# - Use integer part of long or lat as key to get a lower search list
# - Cache a small number of lookups, images are likely to be taken in
# clusters around a spot during import.
def add_location(self, latitude, longitude, place, write=False):
"""Add a location to the database.
return False
def _run(self, query, n=0):
self.log.debug(f"Sqlite run '{query}'")
try:
result = self.cur.execute(query).fetchone()
except sqlite3.DatabaseError as e:
self.log.error(e)
result = False
if result:
return result[n]
else:
return False
def _run_many(self, query, table_list):
self.cur.executemany(query, table_list)
if self.cur.fetchone()[0] != 1:
return False
self.con.commit()
return True
def create_table(self, table, header, statements=None):
:param float latitude: Latitude of the location.
:param float longitude: Longitude of the location.
:param str place: Name for the location.
:param bool write: If true, write the location db to disk.
"""
:params: row data (dict), primary_key (tuple)
data = {}
data['lat'] = latitude
data['long'] = longitude
data['name'] = place
self.location_db.append(data)
if(write is True):
self.update_location_db()
def backup_hash_db(self):
"""Backs up the hash db."""
# TODO
if os.path.isfile(self.hash_db_file):
mask = strftime('%Y-%m-%d_%H-%M-%S')
backup_file_name = '%s-%s' % (self.hash_db_file, mask)
copyfile(self.hash_db_file, backup_file_name)
return backup_file_name
def check_hash(self, key):
"""Check whether a hash is present for the given key.
:param str key:
:returns: bool
"""
fieldset = []
fieldset.append("Id integer primary key autoincrement")
for col, definition in header.items():
fieldset.append(f"{col} {definition}")
# https://stackoverflow.com/questions/11719073/sqlite-insert-or-update-without-changing-rowid-value
if statements:
for statement in statements:
fieldset.append(statement)
return key in self.hash_db
if len(fieldset) > 0:
query = "create table {0} ({1})".format(table, ", ".join(fieldset))
self.cur.execute(query)
self.tables[table]['header'] = header
return True
def get_hash(self, key):
"""Get the hash value for a given key.
return False
def check_row(self, table, row_data):
header = self.tables[table]['header']
if len(row_data) != len(header):
raise ValueError(
f"""Table {table} length mismatch: row_data
{row_data}, header {header}"""
)
columns = ', '.join(row_data.keys())
placeholders = ', '.join('?' * len(row_data))
return columns, placeholders
def update_query(self, table, row_id, columns, placeholders):
:param str key:
:returns: str or None
"""
:returns: query (str)
"""
return f"""replace into {table} (Id, {columns})
values ((select id from {table} where id={row_id}), {placeholders})"""
if(self.check_hash(key) is True):
return self.hash_db[key]
return None
def insert_query(self, table, columns, placeholders):
"""
:returns: query (str)
"""
return f"insert into {table} ({columns}) values ({placeholders})"
def get_location_name(self, latitude, longitude, threshold_m):
"""Find a name for a location in the database.
def upsert_row(self, table, row_data, columns, placeholders, row_id=None):
"""
:returns: lastrowid (int)
https://www.sqlitetutorial.net/sqlite-replace-statement/
https://www.sqlite.org/lang_UPSERT.html
"""
if row_id:
query = self.update_query(table, row_id, columns, placeholders)
else:
query = self.insert_query(table, columns, placeholders)
values = []
for key, value in row_data.items():
if isinstance(value, bool):
values.append(int(value))
else:
values.append(value)
self.cur.execute(query, values)
self.con.commit()
return self.cur.lastrowid
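# For illustration, a minimal standalone sketch (with an assumed two-column
# schema) of the replace-into upsert that update_query builds above:
import sqlite3

con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute("create table location (Id integer primary key autoincrement,"
            " Latitude real, Longitude real, unique(Latitude, Longitude))")
cur.execute("insert into location (Latitude, Longitude) values (?, ?)", (48.85, 2.35))
row_id = cur.lastrowid
# Replacing with the same Id updates the row without changing its rowid.
cur.execute("replace into location (Id, Latitude, Longitude)"
            f" values ((select id from location where id={row_id}), ?, ?)",
            (48.86, 2.35))
print(cur.execute("select Id, Latitude from location").fetchall())  # [(1, 48.86)]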
def upsert_location(self, row_data):
# Check if row already exist
row_id = self.get_location(row_data['Latitude'], row_data['Longitude'], 'Id')
columns, placeholders = self.check_row('location', row_data)
return self.upsert_row('location', row_data, columns, placeholders, row_id)
def upsert_metadata(self, row_data):
# Check if row already exist
row_id = self.get_metadata(row_data['FilePath'], 'Id')
columns, placeholders = self.check_row('metadata', row_data)
return self.upsert_row('metadata', row_data, columns, placeholders, row_id)
def get_header(self, row_data):
"""
:params: row data (dict)
:returns: header
"""
sql_table = {}
for key, value in row_data.items():
for sql_type, t in self.types.items():
# Find corresponding sql_type from python type
if type(value) in t:
sql_table[key] = sql_type
return sql_table
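# A quick standalone illustration of the type mapping above (values assumed):
from datetime import datetime

types = {'text': (str, datetime), 'integer': (int,), 'real': (float,)}
row_data = {'FilePath': '/photos/a.jpg', 'LocationId': 3, 'Latitude': 48.85}
header = {key: sql_type
          for key, value in row_data.items()
          for sql_type, pytypes in types.items()
          if type(value) in pytypes}
print(header)  # {'FilePath': 'text', 'LocationId': 'integer', 'Latitude': 'real'}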
def build_table(self, table, row_data, statements=None):
header = self.get_header(row_data)
return self.create_table(table, header, statements=statements)
def check_table(self, table, row_data):
"""
:params: row data (dict), primary_key (tuple)
:returns: bool
"""
if not self.tables[table]['header']:
self.log.error(f"Table {table} do not exist")
return False
return True
def escape_quote(self, string):
return string.translate(str.maketrans({"'": r"''"}))
def get_checksum(self, file_path):
file_path_e = self.escape_quote(str(file_path))
query = f"select Checksum from metadata where FilePath='{file_path_e}'"
return self._run(query)
def get_metadata(self, file_path, column):
file_path_e = self.escape_quote(str(file_path))
query = f"select {column} from metadata where FilePath='{file_path_e}'"
return self._run(query)
def match_location(self, latitude, longitude):
query = f"""select 1 from location where Latitude='{latitude}'
and Longitude='{longitude}'"""
return self._run(query)
def get_location_data(self, location_id, data):
query = f"select {data} from location where Id='{location_id}'"
return self._run(query)
def get_location(self, latitude, longitude, column):
query = f"""select {column} from location where Latitude='{latitude}'
and Longitude='{longitude}'"""
return self._run(query)
def _get_table(self, table):
return self.cur.execute(f'SELECT * FROM {table}').fetchall()
def get_location_nearby(self, latitude, longitude, Column, threshold_m=3000):
"""
Find a name for a location in the database.
:param float latitude: Latitude of the location.
:param float longitude: Longitude of the location.
:param int threshold_m: Location in the database must be this close to
the given latitude and longitude.
:returns: str, or None if a matching location couldn't be found.
"""
shorter_distance = sys.maxsize
value = None
self.cur.execute('SELECT * FROM location')
for row in self.cur:
distance = distance_between_two_points(
latitude, longitude, row['Latitude'], row['Longitude']
)
last_d = sys.maxsize
name = None
for data in self.location_db:
# As threshold is quite small use simple math
# From http://stackoverflow.com/questions/15736995/how-can-i-quickly-estimate-the-distance-between-two-latitude-longitude-points # noqa
# convert decimal degrees to radians
lon1, lat1, lon2, lat2 = list(map(
radians,
[longitude, latitude, data['long'], data['lat']]
))
r = 6371000 # radius of the earth in m
x = (lon2 - lon1) * cos(0.5 * (lat2 + lat1))
y = lat2 - lat1
d = r * sqrt(x * x + y * y)
# Reuse the lookup if it is closer than the threshold
if distance < shorter_distance and distance <= threshold_m:
shorter_distance = distance
value = row[Column]
if(d <= threshold_m and d < last_d):
name = data['name']
last_d = d
return value
return name
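# A minimal sketch of the equirectangular approximation described above; the
# real helper is distance_between_two_points in ordigi.utils, whose exact
# signature is assumed here:
from math import radians, cos, sqrt

def equirectangular_distance(lat1, lon1, lat2, lon2):
    """Approximate distance in metres; fine for small thresholds."""
    lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
    x = (lon2 - lon1) * cos(0.5 * (lat2 + lat1))
    y = lat2 - lat1
    return 6371000 * sqrt(x * x + y * y)  # mean Earth radius in metres

print(round(equirectangular_distance(48.8566, 2.3522, 51.5074, -0.1278)))
# ~344000 metres between Paris and London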
def delete_row(self, table, column, value):
def get_location_coordinates(self, name):
"""Get the latitude and longitude for a location.
:param str name: Name of the location.
:returns: tuple(float), or None if the location wasn't in the database.
"""
Delete a row by row id in table
:param table: database table
:param id: id of the row
:return:
for data in self.location_db:
if data['name'] == name:
return (data['lat'], data['long'])
return None
def all(self):
"""Generator to get all entries from self.hash_db
:returns tuple(string)
"""
sql = f'delete from {table} where {column}=?'
self.cur.execute(sql, (value,))
self.con.commit()
for checksum, path in self.hash_db.items():
yield (checksum, path)
def delete_filepath(self, value):
self.delete_row('metadata', 'FilePath', value)
def reset_hash_db(self):
self.hash_db = {}
def delete_all_rows(self, table):
"""
Delete all row in table
:param table: database table
:return:
"""
sql = f'delete from {table}'
self.cur.execute(sql)
self.con.commit()
def len(self, table):
sql = f'select count() from {table}'
return self._run(sql)
def update_hash_db(self):
"""Write the hash db to disk."""
with open(self.hash_db_file, 'w') as f:
json.dump(self.hash_db, f)
def update_location_db(self):
"""Write the location db to disk."""
with open(self.location_db_file, 'w') as f:
json.dump(self.location_db, f)


@ -4,16 +4,14 @@ https://github.com/RhetTbull/osxphotos/blob/master/osxphotos/exiftool.py
import atexit
import json
import logging
import os
from pathlib import Path
import re
import shutil
import subprocess
from abc import ABC, abstractmethod
from functools import lru_cache # pylint: disable=syntax-error
from ordigi import LOG
# exiftool -stay_open commands outputs this EOF marker after command is run
EXIFTOOL_STAYOPEN_EOF = "{ready}"
EXIFTOOL_STAYOPEN_EOF_LEN = len(EXIFTOOL_STAYOPEN_EOF)
@ -30,14 +28,14 @@ def exiftool_is_running():
@atexit.register
def terminate_exiftool():
"""Terminate any running ExifTool subprocesses; call this to cleanup when done using ExifTool"""
"""Terminate any running ExifTool subprocesses; call this to cleanup when done using ExifTool """
for proc in EXIFTOOL_PROCESSES:
proc._stop_proc()
@lru_cache(maxsize=1)
def get_exiftool_path():
"""return path of exiftool, cache result"""
""" return path of exiftool, cache result """
exiftool_path = shutil.which("exiftool")
if exiftool_path:
return exiftool_path.rstrip()
@ -53,33 +51,33 @@ class _ExifToolProc:
Creates a singleton object"""
def __new__(cls, *args, **kwargs):
"""create new object or return instance of already created singleton"""
""" create new object or return instance of already created singleton """
if not hasattr(cls, "instance") or not cls.instance:
cls.instance = super().__new__(cls)
return cls.instance
def __init__(self, exiftool=None):
def __init__(self, exiftool=None, logger=logging.getLogger()):
"""construct _ExifToolProc singleton object or return instance of already created object
exiftool: optional path to exiftool binary (if not provided, will search path to find it)"""
self.log = LOG.getChild(self.__class__.__name__)
self._exiftool = exiftool or get_exiftool_path()
self.logger = logger
if hasattr(self, "_process_running") and self._process_running:
# already running
if exiftool is not None and exiftool != self._exiftool:
self.log.warning(
self.logger.warning(
f"exiftool subprocess already running, "
f"ignoring exiftool={exiftool}"
)
return
self._process_running = False
self._exiftool = exiftool or get_exiftool_path()
self._start_proc()
@property
def process(self):
"""return the exiftool subprocess"""
""" return the exiftool subprocess """
if self._process_running:
return self._process
else:
@ -88,19 +86,19 @@ class _ExifToolProc:
@property
def pid(self):
"""return process id (PID) of the exiftool process"""
""" return process id (PID) of the exiftool process """
return self._process.pid
@property
def exiftool(self):
"""return path to exiftool process"""
""" return path to exiftool process """
return self._exiftool
def _start_proc(self):
"""start exiftool in batch mode"""
""" start exiftool in batch mode """
if self._process_running:
self.log.warning(f"exiftool already running: {self._process}")
self.logger.warning(f"exiftool already running: {self._process}")
return
# open exiftool process
@ -125,7 +123,7 @@ class _ExifToolProc:
EXIFTOOL_PROCESSES.append(self)
def _stop_proc(self):
"""stop the exiftool process if it's running, otherwise, do nothing"""
""" stop the exiftool process if it's running, otherwise, do nothing """
if not self._process_running:
return
@ -148,15 +146,9 @@ class _ExifToolProc:
class ExifTool:
"""Basic exiftool interface for reading and writing EXIF tags"""
""" Basic exiftool interface for reading and writing EXIF tags """
def __init__(
self,
filepath,
exiftool=None,
overwrite=True,
flags=None,
):
def __init__(self, filepath, exiftool=None, overwrite=True, flags=None, logger=logging.getLogger()):
"""Create ExifTool object
Args:
@ -176,7 +168,7 @@ class ExifTool:
self.error = None
# if running as a context manager, self._context_mgr will be True
self._context_mgr = False
self._exiftoolproc = _ExifToolProc(exiftool=exiftool)
self._exiftoolproc = _ExifToolProc(exiftool=exiftool, logger=logger)
self._read_exif()
@property
@ -326,12 +318,12 @@ class ExifTool:
@property
def pid(self):
"""return process id (PID) of the exiftool process"""
""" return process id (PID) of the exiftool process """
return self._process.pid
@property
def version(self):
"""returns exiftool version"""
""" returns exiftool version """
ver, _, _ = self.run_commands("-ver", no_file=True)
return ver.decode("utf-8")
@ -369,12 +361,12 @@ class ExifTool:
return exifdict
def json(self):
"""returns JSON string containing all EXIF tags and values from exiftool"""
""" returns JSON string containing all EXIF tags and values from exiftool """
json, _, _ = self.run_commands("-json")
return json
def _read_exif(self):
"""read exif data from file"""
""" read exif data from file """
data = self.asdict()
self.data = {k: v for k, v in data.items()}
@ -395,24 +387,23 @@ class ExifTool:
class ExifToolCaching(ExifTool):
"""Basic exiftool interface for reading and writing EXIF tags, with caching.
Use this only when you know the file's EXIF data will not be changed by any external process.
""" Basic exiftool interface for reading and writing EXIF tags, with caching.
Use this only when you know the file's EXIF data will not be changed by any external process.
Creates a singleton cached ExifTool instance """
Creates a singleton cached ExifTool instance"""
_singletons = {}
_singletons: dict[Path, ExifTool] = {}
def __new__(cls, filepath, exiftool=None):
"""create new object or return instance of already created singleton"""
def __new__(cls, filepath, exiftool=None, logger=logging.getLogger()):
""" create new object or return instance of already created singleton """
if filepath not in cls._singletons:
cls._singletons[filepath] = _ExifToolCaching(
filepath, exiftool=exiftool
)
cls._singletons[filepath] = _ExifToolCaching(filepath,
exiftool=exiftool, logger=logger)
return cls._singletons[filepath]
class _ExifToolCaching(ExifTool):
def __init__(self, filepath, exiftool=None):
def __init__(self, filepath, exiftool=None, logger=logging.getLogger()):
"""Create read-only ExifTool object that caches values
Args:
@ -424,9 +415,8 @@ class _ExifToolCaching(ExifTool):
"""
self._json_cache = None
self._asdict_cache = {}
super().__init__(
filepath, exiftool=exiftool, overwrite=False, flags=None
)
super().__init__(filepath, exiftool=exiftool, overwrite=False,
flags=None, logger=logger)
def run_commands(self, *commands, no_file=False):
if commands[0] not in ["-json", "-ver"]:
@ -463,6 +453,7 @@ class _ExifToolCaching(ExifTool):
return self._asdict_cache[tag_groups][normalized]
def flush_cache(self):
"""Clear cached data so that calls to json or asdict return fresh data"""
""" Clear cached data so that calls to json or asdict return fresh data """
self._json_cache = None
self._asdict_cache = {}

779
ordigi/filesystem.py Normal file

@ -0,0 +1,779 @@
"""
General file system methods.
"""
from builtins import object
import filecmp
import hashlib
import logging
import os
import pathlib
import re
import sys
import shutil
import time
from datetime import datetime, timedelta
from ordigi import constants
from ordigi import geolocation
from ordigi import media
from ordigi.media import Media, get_all_subclasses
from ordigi.images import Images
from ordigi.summary import Summary
class FileSystem(object):
"""A class for interacting with the file system."""
def __init__(self, cache=False, day_begins=0, dry_run=False, exclude_regex_list=set(),
filter_by_ext=set(), logger=logging.getLogger(), max_deep=None,
mode='copy', path_format=None):
self.cache = cache
self.day_begins = day_begins
self.dry_run = dry_run
self.exclude_regex_list = exclude_regex_list
if '%media' in filter_by_ext:
filter_by_ext.remove('%media')
self.filter_by_ext = filter_by_ext.union(media.extensions)
else:
self.filter_by_ext = filter_by_ext
self.items = self.get_items()
self.logger = logger
self.max_deep = max_deep
self.mode = mode
# TODO have to be removed
if path_format:
self.path_format = path_format
else:
self.path_format = os.path.join(constants.default_path,
constants.default_name)
self.summary = Summary()
self.whitespace_regex = '[ \t\n\r\f\v]+'
def create_directory(self, directory_path):
"""Create a directory if it does not already exist.
:param str directory_path: A fully qualified path of the directory
to create.
:returns: bool
"""
try:
if os.path.exists(directory_path):
return True
else:
if not self.dry_run:
os.makedirs(directory_path)
self.logger.info(f'Create {directory_path}')
return True
except OSError:
# OSError is thrown for cases like no permission
pass
return False
def get_items(self):
return {
'album': '{album}',
'basename': '{basename}',
'camera_make': '{camera_make}',
'camera_model': '{camera_model}',
'city': '{city}',
'custom': '{".*"}',
'country': '{country}',
# 'folder': '{folder[<>]?[-+]?[1-9]?}',
'ext': '{ext}',
'folder': '{folder}',
'folders': r'{folders(\[[0-9:]{0,3}\])?}',
'location': '{location}',
'name': '{name}',
'original_name': '{original_name}',
'state': '{state}',
'title': '{title}',
'date': '{(%[a-zA-Z][^a-zA-Z]*){1,8}}' # search for date format string
}
def walklevel(self, src_path, maxlevel=None):
"""
Walk into input directory recursively until desired maxlevel
source: https://stackoverflow.com/questions/229186/os-walk-without-digging-into-directories-below
"""
src_path = src_path.rstrip(os.path.sep)
if not os.path.isdir(src_path):
return None
num_sep = src_path.count(os.path.sep)
for root, dirs, files in os.walk(src_path):
level = root.count(os.path.sep) - num_sep
yield root, dirs, files, level
if maxlevel is not None and level >= maxlevel:
del dirs[:]
def get_all_files(self, path, extensions=False, exclude_regex_list=set()):
"""Recursively get all files which match a path and extension.
:param str path: Path to start recursive file listing
:param tuple(str) extensions: File extensions to include (whitelist)
:returns: generator
"""
if self.filter_by_ext != () and not extensions:
# Filtering files by extensions.
if '%media' in self.filter_by_ext:
extensions = set()
subclasses = get_all_subclasses()
for cls in subclasses:
extensions.update(cls.extensions)
else:
extensions = self.filter_by_ext
# Create a list of compiled regular expressions to match against the file path
compiled_regex_list = [re.compile(regex) for regex in exclude_regex_list]
for dirname, dirnames, filenames in os.walk(path):
if dirname == os.path.join(path, '.ordigi'):
continue
for filename in filenames:
# If file extension is in `extensions`
# And if file path is not in exclude regexes
# Then append to the list
filename_path = os.path.join(dirname, filename)
if (
extensions == False
or os.path.splitext(filename)[1][1:].lower() in extensions
and not self.should_exclude(filename_path, compiled_regex_list, False)
):
yield filename_path
def check_for_early_morning_photos(self, date):
"""check for early hour photos to be grouped with previous day"""
if date.hour < self.day_begins:
self.logger.info('moving this photo to the previous day for\
classification purposes (day_begins=' + str(self.day_begins) + ')')
date = date - timedelta(hours=date.hour+1) # push it to the day before for classification purposes
return date
def get_location_part(self, mask, part, place_name):
"""Takes a mask for a location and interpolates the actual place names.
Given these parameters here are the outputs.
mask = 'city'
part = 'city-random'
place_name = {'city': u'Sunnyvale'}
return 'Sunnyvale'
mask = 'location'
part = 'location'
place_name = {'default': u'Sunnyvale', 'city': u'Sunnyvale'}
return 'Sunnyvale'
:returns: str
"""
folder_name = part
if(mask in place_name):
replace_target = mask
replace_with = place_name[mask]
else:
replace_target = part
replace_with = ''
folder_name = folder_name.replace(
replace_target,
replace_with,
)
return folder_name
def get_part(self, item, mask, metadata, db, subdirs):
"""Parse a specific folder's name given a mask and metadata.
:param item: Name of the item as defined in the path (i.e. date from %date)
:param mask: Mask representing the template for the path (i.e. %city %state)
:param metadata: Metadata dictionary.
:returns: str
"""
# Each item has its own custom logic and we evaluate a single item and return
# the evaluated string.
part = ''
if item == 'basename':
part = os.path.basename(metadata['base_name'])
elif item == 'name':
# Remove date prefix added to the name.
part = metadata['base_name']
for i, rx in self.match_date_from_string(metadata['base_name']):
part = re.sub(rx, '', part)
elif item == 'date':
date = self.get_date_taken(metadata)
# early morning photos can be grouped with previous day
date = self.check_for_early_morning_photos(date)
if date is not None:
part = date.strftime(mask)
elif item in ('location', 'city', 'state', 'country'):
place_name = geolocation.place_name(
metadata['latitude'],
metadata['longitude'],
db,
self.cache,
self.logger
)
if item == 'location':
mask = 'default'
part = self.get_location_part(mask, item, place_name)
elif item == 'folder':
part = os.path.basename(subdirs)
elif item == 'folders':
folders = pathlib.Path(subdirs).parts
folders = eval(mask)
part = os.path.join(*folders)
elif item in ('album','camera_make', 'camera_model', 'ext',
'title'):
if metadata[item]:
part = metadata[item]
elif item == 'original_name':
# First we check if we have metadata['original_name'].
# We have to do this for backwards compatibility because
# we originally did not store this back into EXIF.
if metadata[item]:
part = metadata['original_name']
elif item in 'custom':
# Fallback string
part = mask[1:-1]
return part
def get_path(self, metadata, db, subdirs='', whitespace_sub='_'):
"""path_format: {%Y-%d-%m}/%u{city}/{album}
Returns file path.
:returns: string"""
path_format = self.path_format
path = []
path_parts = path_format.split('/')
for path_part in path_parts:
this_parts = path_part.split('|')
# p = []
for this_part in this_parts:
# parts = ''
for item, regex in self.items.items():
matched = re.search(regex, this_part)
if matched:
# parts = re.split(mask, this_part)
# parts = this_part.split('%')[1:]
part = self.get_part(item, matched.group()[1:-1], metadata, db,
subdirs)
part = part.strip()
if part == '':
# delete separator if any
regex = '[-_ .]?(%[ul])?' + regex
this_part = re.sub(regex, part, this_part)
else:
# Capitalization
u_regex = '%u' + regex
l_regex = '%l' + regex
if re.search(u_regex, this_part):
this_part = re.sub(u_regex, part.upper(), this_part)
elif re.search(l_regex, this_part):
this_part = re.sub(l_regex, part.lower(), this_part)
else:
this_part = re.sub(regex, part, this_part)
if this_part:
# Check if all masks are substituted
if True in [c in this_part for c in '{}']:
self.logger.error(f'Format path part invalid: \
{this_part}')
sys.exit(1)
path.append(this_part.strip())
# We break as soon as we have a value to append
break
# Else we continue for fallbacks
if(len(path[-1]) == 0):
path[-1] = metadata['base_name']
path_string = os.path.join(*path)
if whitespace_sub != ' ':
# Lastly we want to sanitize the name
path_string = re.sub(self.whitespace_regex, whitespace_sub, path_string)
return path_string
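# For illustration, a hand expansion of a hypothetical path format with
# assumed metadata values:
from datetime import datetime

date = datetime(2016, 9, 15, 12, 34, 56)
city = 'Sunnyvale'  # would normally come from geolocation
path = '/'.join([
    date.strftime('%Y-%m-%b'),             # {%Y-%m-%b}  -> 2016-09-Sep
    city if city else 'Unknown Location',  # {city}|{"Unknown Location"}
    date.strftime('%Y-%m-%d_%H-%M-%S') + '-IMG_1234.jpg',  # {date}-{name}.%l{ext}
])
print(path)  # 2016-09-Sep/Sunnyvale/2016-09-15_12-34-56-IMG_1234.jpg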
def match_date_from_string(self, string, user_regex=None):
if user_regex is not None:
# Yield the user-provided regex so callers can match against it
yield 'user', re.compile(user_regex)
else:
regex = {
# regex to match date format type %Y%m%d, %y%m%d, %d%m%Y,
# etc...
'a': re.compile(
r'.*[_-]?(?P<year>\d{4})[_-]?(?P<month>\d{2})[_-]?(?P<day>\d{2})[_-]?(?P<hour>\d{2})[_-]?(?P<minute>\d{2})[_-]?(?P<second>\d{2})'),
'b': re.compile (
r'[-_./](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_./]'),
# not very accurate
'c': re.compile (
r'[-_./](?P<year>\d{2})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_./]'),
'd': re.compile (
r'[-_./](?P<day>\d{2})[-_.](?P<month>\d{2})[-_.](?P<year>\d{4})[-_./]')
}
for i, rx in regex.items():
yield i, rx
def get_date_from_string(self, string, user_regex=None):
# If missing datetime from EXIF data check if filename is in datetime format.
# For this use a user provided regex if possible.
# Otherwise assume a filename such as IMG_20160915_123456.jpg as default.
matches = []
for i, rx in self.match_date_from_string(string, user_regex):
match = re.findall(rx, string)
if match != []:
if i == 'c':
match = [('20' + match[0][0], match[0][1], match[0][2])]
elif i == 'd':
# reorder items
match = [(match[0][2], match[0][1], match[0][0])]
# matches = match + matches
if len(match) != 1:
# The time string is not unique
continue
matches.append((match[0], rx))
# We want only the first match for the moment
break
# check if there is only one result
if len(set(matches)) == 1:
try:
# Convert str to int
date_object = tuple(map(int, matches[0][0]))
time = False
if len(date_object) > 3:
time = True
date = datetime(*date_object)
except (KeyError, ValueError):
return None
return date
return None
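# A quick check of what the default regexes above recover from a filename
# such as IMG_20160915_123456.jpg (pattern 'a'):
import re
from datetime import datetime

rx = re.compile(r'.*[_-]?(?P<year>\d{4})[_-]?(?P<month>\d{2})[_-]?(?P<day>\d{2})'
                r'[_-]?(?P<hour>\d{2})[_-]?(?P<minute>\d{2})[_-]?(?P<second>\d{2})')
match = re.findall(rx, 'IMG_20160915_123456.jpg')
print(match)                          # [('2016', '09', '15', '12', '34', '56')]
print(datetime(*map(int, match[0])))  # 2016-09-15 12:34:56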
def get_date_taken(self, metadata):
'''
Get the date taken from metadata or filename
:returns: datetime or None.
'''
if metadata is None:
return None
basename = metadata['base_name']
date_original = metadata['date_original']
if metadata['original_name'] is not None:
date_filename = self.get_date_from_string(metadata['original_name'])
else:
date_filename = self.get_date_from_string(basename)
date_created = metadata['date_created']
if metadata['date_original'] is not None:
if (date_filename is not None and
date_filename != date_original):
self.logger.warn(f"{basename} time mark is different from {date_original}")
# TODO ask for keep date taken, filename time, or neither
return metadata['date_original']
else:
if date_filename is not None:
if date_created is not None and date_filename > date_created:
self.logger.warn(f"{basename} time mark is more recent than {date_created}")
return date_filename
# TODO warn and ask for confirmation
if date_created is not None:
    return date_created
if metadata['date_modified'] is not None:
    return metadata['date_modified']
return None
def checksum(self, file_path, blocksize=65536):
"""Create a hash value for the given file.
See http://stackoverflow.com/a/3431835/1318758.
:param str file_path: Path to the file to create a hash for.
:param int blocksize: Read blocks of this size from the file when
creating the hash.
:returns: str or None
"""
hasher = hashlib.sha256()
with open(file_path, 'rb') as f:
buf = f.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(blocksize)
return hasher.hexdigest()
def checkcomp(self, dest_path, src_checksum):
"""Check file.
"""
# src_checksum = self.checksum(src_path)
if self.dry_run:
return src_checksum
dest_checksum = self.checksum(dest_path)
if dest_checksum != src_checksum:
self.logger.info('Source and destination checksums do not match')
return False
return src_checksum
def sort_file(self, src_path, dest_path, remove_duplicates=True):
'''Copy or move file to dest_path.'''
mode = self.mode
dry_run = self.dry_run
# check for collisions
if(src_path == dest_path):
self.logger.info(f'File {dest_path} already sorted')
return None
elif os.path.isfile(dest_path):
self.logger.info(f'File {dest_path} already exists')
if remove_duplicates:
if filecmp.cmp(src_path, dest_path):
self.logger.info('Source and destination files are identical. The duplicate will be ignored.')
if(mode == 'move'):
if not dry_run:
os.remove(src_path)
self.logger.info(f'remove: {src_path}')
return None
else: # name is same, but file is different
self.logger.info('Source and destination files are different.')
return False
else:
return False
else:
if(mode == 'move'):
if not dry_run:
# Move the processed file into the destination directory
shutil.move(src_path, dest_path)
self.logger.info(f'move: {src_path} -> {dest_path}')
elif mode == 'copy':
if not dry_run:
shutil.copy2(src_path, dest_path)
self.logger.info(f'copy: {src_path} -> {dest_path}')
return True
def check_file(self, src_path, dest_path, src_checksum, db):
# Check if file remain the same
checksum = self.checkcomp(dest_path, src_checksum)
has_errors = False
if checksum:
if not self.dry_run:
db.add_hash(checksum, dest_path)
db.update_hash_db()
self.summary.append((src_path, dest_path))
else:
self.logger.error(f'Files {src_path} and {dest_path} are not identical')
# sys.exit(1)
self.summary.append((src_path, False))
has_errors = True
return self.summary, has_errors
def get_files_in_path(self, path, extensions=set()):
"""Recursively get files which match a path and extension.
:param str path: Path to start recursive file listing
:param tuple(str) extensions: File extensions to include (whitelist)
:returns: file_path, subdirs
"""
file_list = set()
if os.path.isfile(path):
if not self.should_exclude(path, self.exclude_regex_list, True):
file_list.add((path, ''))
# Create a list of compiled regular expressions to match against the file path
compiled_regex_list = [re.compile(regex) for regex in self.exclude_regex_list]
subdirs = ''
for dirname, dirnames, filenames, level in self.walklevel(path,
self.max_deep):
if dirname == os.path.join(path, '.ordigi'):
continue
subdirs = os.path.join(subdirs, os.path.basename(dirname))
for filename in filenames:
# If file extension is in `extensions`
# And if file path is not in exclude regexes
# Then append to the list
filename_path = os.path.join(dirname, filename)
if (
extensions == set()
or os.path.splitext(filename)[1][1:].lower() in extensions
and not self.should_exclude(filename_path, compiled_regex_list, False)
):
file_list.add((filename_path, subdirs))
return file_list
def _conflict_solved(self, conflict_file_list, item, dest_path):
self.logger.warning(f'Same name already exists...renaming to: {dest_path}')
del(conflict_file_list[item])
def solve_conflicts(self, conflict_file_list, remove_duplicates):
file_list = conflict_file_list.copy()
for item, file_paths in enumerate(file_list):
src_path = file_paths['src_path']
dest_path = file_paths['dest_path']
# Try to sort the file
result = self.sort_file(src_path, dest_path, remove_duplicates)
# Remove from the conflict file list if the file was successfully copied or ignored
if result is True or result is None:
self._conflict_solved(conflict_file_list, item, dest_path)
else:
n = 1
while result is False:
if n > 100:
self.logger.warning(f'{self.mode}: too many renaming attempts for {dest_path}...')
break
# Add appendix to the name
pre, ext = os.path.splitext(dest_path)
dest_path = pre + '_' + str(n) + ext
conflict_file_list[item]['dest_path'] = dest_path
result = self.sort_file(src_path, dest_path, remove_duplicates)
else:
self._conflict_solved(conflict_file_list, item, dest_path)
return result
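# For illustration, the rename sequence the retry loop above produces; note
# the appendix compounds because dest_path is reused on each retry:
import os

dest_path = 'photo.jpg'
for n in range(1, 3):
    pre, ext = os.path.splitext(dest_path)
    dest_path = pre + '_' + str(n) + ext
    print(dest_path)  # photo_1.jpg, then photo_1_2.jpg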
def sort_files(self, paths, destination, db, remove_duplicates=False,
ignore_tags=set()):
"""
Sort files into appropriate folder
"""
has_errors = False
for path in paths:
# some error checking
if not os.path.exists(path):
self.logger.error(f'Directory {path} does not exist')
path = os.path.expanduser(path)
conflict_file_list = []
for src_path, subdirs in self.get_files_in_path(path,
extensions=self.filter_by_ext):
# Process files
src_checksum = self.checksum(src_path)
media = Media(src_path, ignore_tags, self.logger)
if media:
metadata = media.get_metadata()
# Get the destination path according to metadata
file_path = self.get_path(metadata, db, subdirs=subdirs)
else:
# Keep same directory structure
file_path = os.path.relpath(src_path, path)
dest_directory = os.path.join(destination,
os.path.dirname(file_path))
dest_path = os.path.join(destination, file_path)
self.create_directory(dest_directory)
result = self.sort_file(src_path, dest_path, remove_duplicates)
if result is False:
# There is conflict files
conflict_file_list.append({'src_path': src_path, 'dest_path': dest_path})
result = self.solve_conflicts(conflict_file_list, remove_duplicates)
if result is True:
self.summary, has_errors = self.check_file(src_path,
dest_path, src_checksum, db)
elif result is None:
has_errors = False
else:
self.summary.append((src_path, False))
has_errors = True
return self.summary, has_errors
def check_path(self, path):
path = os.path.abspath(os.path.expanduser(path))
# some error checking
if not os.path.exists(path):
self.logger.error(f'Directory {path} does not exist')
sys.exit(1)
return path
def set_hash(self, result, src_path, dest_path, src_checksum, db):
if result:
# Check if file remain the same
result = self.checkcomp(dest_path, src_checksum)
has_errors = False
if result:
if not self.dry_run:
db.add_hash(src_checksum, dest_path)
db.update_hash_db()
if dest_path:
self.logger.info(f'{src_path} -> {dest_path}')
self.summary.append((src_path, dest_path))
else:
self.logger.error(f'Files {src_path} and {dest_path} are not identical')
# sys.exit(1)
self.summary.append((src_path, False))
has_errors = True
else:
self.summary.append((src_path, False))
has_errors = True
return has_errors
def move_file(self, img_path, dest_path, checksum, db):
if not self.dry_run:
try:
shutil.move(img_path, dest_path)
except OSError as error:
self.logger.error(error)
self.logger.info(f'move: {img_path} -> {dest_path}')
return self.set_hash(True, img_path, dest_path, checksum, db)
def sort_similar_images(self, path, db, similarity=80):
has_errors = False
path = self.check_path(path)
for dirname, dirnames, filenames, level in self.walklevel(path, None):
if dirname == os.path.join(path, '.ordigi'):
continue
if dirname.find('similar_to') == 0:
continue
file_paths = set()
for filename in filenames:
file_paths.add(os.path.join(dirname, filename))
i = Images(file_paths, logger=self.logger)
images = set([ i for i in i.get_images() ])
for image in images:
if not os.path.isfile(image):
continue
checksum1 = self.checksum(image)
# Process files
# media = Media(src_path, False, self.logger)
# TODO compare metadata
# if media:
# metadata = media.get_metadata()
similar = False
moved_imgs = set()
for img_path in i.find_similar(image, similarity):
similar = True
checksum2 = self.checksum(img_path)
# move image into directory
name = os.path.splitext(os.path.basename(image))[0]
directory_name = 'similar_to_' + name
dest_directory = os.path.join(os.path.dirname(img_path),
directory_name)
dest_path = os.path.join(dest_directory, os.path.basename(img_path))
result = self.create_directory(dest_directory)
# Move the similar file into the destination directory
if result:
result = self.move_file(img_path, dest_path, checksum2, db)
moved_imgs.add(img_path)
if not result:
has_errors = True
else:
has_errors = True
if similar:
dest_path = os.path.join(dest_directory,
os.path.basename(image))
result = self.move_file(image, dest_path, checksum1, db)
moved_imgs.add(image)
if not result:
has_errors = True
# for moved_img in moved_imgs:
# os.remove(moved_img)
return self.summary, has_errors
def revert_compare(self, path, db):
has_errors = False
path = self.check_path(path)
for dirname, dirnames, filenames, level in self.walklevel(path, None):
if dirname == os.path.join(path, '.ordigi'):
continue
if dirname.find('similar_to') == 0:
continue
for subdir in dirnames:
if subdir.find('similar_to') == 0:
file_names = os.listdir(os.path.abspath(os.path.join(dirname, subdir)))
for file_name in file_names:
# move file to initial folder
img_path = os.path.join(dirname, subdir, file_name)
if os.path.isdir(img_path):
continue
checksum = self.checksum(img_path)
dest_path = os.path.join(dirname, os.path.basename(img_path))
result = self.move_file(img_path, dest_path, checksum, db)
if not result:
has_errors = True
# remove directory
try:
os.rmdir(os.path.join(dirname, subdir))
except OSError as error:
self.logger.error(error)
return self.summary, has_errors
def set_utime_from_metadata(self, date_taken, file_path):
""" Set the modification time on the file based on the file name.
"""
# Initialize date taken to what's returned from the metadata function.
os.utime(file_path, (int(datetime.now().timestamp()), int(date_taken.timestamp())))
def should_exclude(self, path, regex_list=set(), needs_compiled=False):
if(len(regex_list) == 0):
return False
if(needs_compiled):
compiled_list = []
for regex in regex_list:
compiled_list.append(re.compile(regex))
regex_list = compiled_list
return any(regex.search(path) for regex in regex_list)


@ -1,101 +1,160 @@
"""Look up geolocation information for media objects."""
from os import path
import geopy
from geopy.geocoders import Nominatim, options
from geopy.geocoders import Nominatim
import logging
from ordigi import LOG
from ordigi import config
from ordigi import constants
from ordigi.config import load_config, get_geocoder
__KEY__ = None
__DEFAULT_LOCATION__ = 'Unknown Location'
__PREFER_ENGLISH_NAMES__ = None
class GeoLocation:
"""Look up geolocation information for media objects."""
def coordinates_by_name(name, db):
# Try to get cached location first
cached_coordinates = db.get_location_coordinates(name)
if(cached_coordinates is not None):
return {
'latitude': cached_coordinates[0],
'longitude': cached_coordinates[1]
}
def __init__(
self,
geocoder='Nominatim',
prefer_english_names=False,
timeout=options.default_timeout,
):
self.geocoder = geocoder
self.log = LOG.getChild(self.__class__.__name__)
self.prefer_english_names = prefer_english_names
self.timeout = timeout
# If the name is not cached then we go ahead with an API lookup
geocoder = get_geocoder()
if geocoder == 'Nominatim':
# timeout = DEFAULT_SENTINEL
timeout = 10
locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
geolocation_info = locator.geocode(name)
if geolocation_info is not None:
return {
'latitude': geolocation_info.latitude,
'longitude': geolocation_info.longitude
}
else:
raise NameError(geocoder)
def coordinates_by_name(self, name, timeout=options.default_timeout):
"""Get coordinates from given location name"""
geocoder = self.geocoder
if geocoder == 'Nominatim':
locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
geolocation_info = locator.geocode(name)
if geolocation_info is not None:
return {
'latitude': geolocation_info.latitude,
'longitude': geolocation_info.longitude,
}
return None
def decimal_to_dms(decimal):
decimal = float(decimal)
decimal_abs = abs(decimal)
minutes, seconds = divmod(decimal_abs*3600, 60)
degrees, minutes = divmod(minutes, 60)
sign = 1 if decimal >= 0 else -1
return (degrees, minutes, seconds, sign)
def dms_to_decimal(degrees, minutes, seconds, direction=' '):
sign = 1
if direction[0] in 'WSws':
sign = -1
return (degrees + minutes / 60 + seconds / 3600) * sign
def dms_string(decimal, type='latitude'):
# Example string -> 38 deg 14' 27.82" S
dms = decimal_to_dms(decimal)
if type == 'latitude':
direction = 'N' if decimal >= 0 else 'S'
elif type == 'longitude':
direction = 'E' if decimal >= 0 else 'W'
return '{} deg {}\' {}" {}'.format(dms[0], dms[1], dms[2], direction)
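# A quick worked example of the conversions above, using the functions just
# defined (seconds shown rounded):
d, m, s, sign = decimal_to_dms(-38.240506)
print(d, m, round(s, 2), sign)                 # 38.0 14.0 25.82 -1
print(dms_string(-38.240506, 'latitude'))      # 38.0 deg 14.0' 25.82..." S
print(round(dms_to_decimal(d, m, s, 'S'), 6))  # -38.240506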
def get_prefer_english_names():
global __PREFER_ENGLISH_NAMES__
if __PREFER_ENGLISH_NAMES__ is not None:
return __PREFER_ENGLISH_NAMES__
config = load_config(constants.CONFIG_FILE)
if('prefer_english_names' not in config['Geolocation']):
return False
__PREFER_ENGLISH_NAMES__ = bool(config['Geolocation']['prefer_english_names'])
return __PREFER_ENGLISH_NAMES__
def place_name(lat, lon, db, cache=True, logger=logging.getLogger()):
lookup_place_name_default = {'default': __DEFAULT_LOCATION__}
if(lat is None or lon is None):
return lookup_place_name_default
# Convert lat/lon to floats
if(not isinstance(lat, float)):
lat = float(lat)
if(not isinstance(lon, float)):
lon = float(lon)
# Try to get cached location first
# 3 km distance radius for a match
cached_place_name = None
if cache:
cached_place_name = db.get_location_name(lat, lon, 3000)
# We check that it's a dict to coerce an upgrade of the location
# db from a string location to a dictionary. See gh-160.
if(isinstance(cached_place_name, dict)):
return cached_place_name
lookup_place_name = {}
geocoder = get_geocoder()
if geocoder == 'Nominatim':
geolocation_info = lookup_osm(lat, lon, logger)
else:
raise NameError(geocoder)
if(geolocation_info is not None and 'address' in geolocation_info):
address = geolocation_info['address']
# gh-386 adds support for town
# taking precedence after city for backwards compatibility
for loc in ['city', 'town', 'village', 'state', 'country']:
if(loc in address):
lookup_place_name[loc] = address[loc]
# In many cases the desired key is not available so we
# set the most specific as the default.
if('default' not in lookup_place_name):
lookup_place_name['default'] = address[loc]
if(lookup_place_name):
db.add_location(lat, lon, lookup_place_name)
# TODO: Maybe this should only be done on exit and not for every write.
db.update_location_db()
if('default' not in lookup_place_name):
lookup_place_name = lookup_place_name_default
return lookup_place_name
def lookup_osm(lat, lon, logger=logging.getLogger()):
prefer_english_names = get_prefer_english_names()
try:
timeout = 10
locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
coords = (lat, lon)
if(prefer_english_names):
lang='en'
else:
raise NameError(geocoder)
lang='local'
locator_reverse = locator.reverse(coords, language=lang)
if locator_reverse is not None:
return locator_reverse.raw
else:
return None
except (geopy.exc.GeocoderUnavailable, geopy.exc.GeocoderServiceError) as e:
logger.error(e)
return None
# Fix *** TypeError: `address` must not be None
except (TypeError, ValueError) as e:
logger.error(e)
return None
def place_name(self, lat, lon, timeout=options.default_timeout):
"""get place name from coordinates"""
lookup_place_name_default = {'default': None}
if lat is None or lon is None:
return lookup_place_name_default
# Convert lat/lon to floats
if not isinstance(lat, float):
lat = float(lat)
if not isinstance(lon, float):
lon = float(lon)
lookup_place_name = {}
geocoder = self.geocoder
if geocoder == 'Nominatim':
geolocation_info = self.lookup_osm(lat, lon, timeout)
else:
raise NameError(geocoder)
if geolocation_info is not None and 'address' in geolocation_info:
address = geolocation_info['address']
# gh-386 adds support for town
# taking precedence after city for backwards compatibility
for loc in ['city', 'town', 'village', 'state', 'country']:
if loc in address:
lookup_place_name[loc] = address[loc]
# In many cases the desired key is not available so we
# set the most specific as the default.
if 'default' not in lookup_place_name:
lookup_place_name['default'] = address[loc]
if 'default' not in lookup_place_name:
lookup_place_name = lookup_place_name_default
return lookup_place_name
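# For illustration, a standalone sketch of the fallback logic above with an
# assumed Nominatim address dict:
address = {'city': 'Sunnyvale', 'state': 'California', 'country': 'United States'}
lookup_place_name = {}
for loc in ['city', 'town', 'village', 'state', 'country']:
    if loc in address:
        lookup_place_name[loc] = address[loc]
        if 'default' not in lookup_place_name:
            lookup_place_name['default'] = address[loc]
print(lookup_place_name)
# {'city': 'Sunnyvale', 'default': 'Sunnyvale',
#  'state': 'California', 'country': 'United States'}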
def lookup_osm(self, lat, lon, timeout=options.default_timeout):
"""Get Geolocation address data from latitude and longitude"""
locator_reverse = None
try:
locator = Nominatim(user_agent='myGeocoder', timeout=timeout)
coords = (lat, lon)
if self.prefer_english_names:
lang = 'en'
else:
lang = 'local'
try:
locator_reverse = locator.reverse(coords, language=lang)
except (geopy.exc.GeocoderUnavailable, geopy.exc.GeocoderTimedOut) as e:
self.log.error(e)
# Fix *** TypeError: `address` must not be None
except (TypeError, ValueError) as e:
self.log.error(e)
else:
if locator_reverse is not None:
return locator_reverse.raw
return None


@ -5,30 +5,27 @@ image objects (JPG, DNG, etc.).
.. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
"""
import imghdr
import os
import imagehash
import imghdr
import logging
import numpy as np
import os
from PIL import Image as img
from PIL import UnidentifiedImageError
from ordigi import LOG
import time
# HEIC extension support (experimental, not tested)
PYHEIF = False
try:
from pyheif_pillow_opener import register_heif_opener
PYHEIF = True
# Allow to open HEIF/HEIC image from pillow
register_heif_opener()
except ImportError as e:
LOG.info(e)
logging.info(e)
class Image:
"""Image file class"""
class Image():
def __init__(self, img_path, hash_size=8):
@ -54,25 +51,21 @@ class Image:
# things like mode, size, and other properties required to decode the file,
# but the rest of the file is not processed until later.
try:
image = img.open(self.img_path)
im = img.open(self.img_path)
except (IOError, UnidentifiedImageError):
return False
if image.format is None:
if(im.format is None):
return False
return True
def get_hash(self):
"""Get image hash"""
try:
with img.open(self.img_path) as image:
return imagehash.average_hash(image, self.hash_size).hash
except (OSError, UnidentifiedImageError):
return None
with img.open(self.img_path) as img_path:
return imagehash.average_hash(img_path, self.hash_size).hash
class Images:
class Images():
"""A image object.
@ -80,45 +73,37 @@ class Images:
"""
#: Valid extensions for image files.
extensions = (
'arw',
'cr2',
'dng',
'gif',
'heic',
'jpeg',
'jpg',
'nef',
'png',
'rw2',
)
extensions = ('arw', 'cr2', 'dng', 'gif', 'heic', 'jpeg', 'jpg', 'nef', 'png', 'rw2')
def __init__(self, images, hash_size=8):
self.images = images
self.duplicates = []
def __init__(self, file_paths=None, hash_size=8, logger=logging.getLogger()):
self.file_paths = file_paths
self.hash_size = hash_size
self.log = LOG.getChild(self.__class__.__name__)
if not PYHEIF:
self.log.info("No module named 'pyheif_pillow_opener'")
self.duplicates = []
self.logger = logger
def get_images(self):
''':returns: img_path generator
'''
for img_path in self.file_paths:
image = Image(img_path)
if image.is_image():
yield img_path
def get_images_hashes(self):
"""Get image hashes"""
hashes = {}
# Searching for duplicates.
for image in self.images:
with img.open(image.img_path) as i:
yield imagehash.average_hash(i, self.hash_size)
for img_path in self.get_images():
with img.open(img_path) as img:
yield imagehash.average_hash(img, self.hash_size)
def find_duplicates(self, img_path):
"""Find duplicates"""
duplicates = []
hashes = {}
for temp_hash in self.get_images_hashes():
for temp_hash in get_images_hashes(self.file_paths):
if temp_hash in hashes:
self.log.info(
"Duplicate {} \nfound for image {}\n".format(
img_path, hashes[temp_hash]
)
)
self.logger.info("Duplicate {} \nfound for image {}\n".format(img_path, hashes[temp_hash]))
duplicates.append(img_path)
else:
hashes[temp_hash] = img_path
@ -126,62 +111,53 @@ class Images:
return duplicates
def remove_duplicates(self, duplicates):
"""Remove duplicate files"""
for duplicate in duplicates:
try:
os.remove(duplicate)
except OSError as error:
self.log.error(error)
self.logger.error(error)
def remove_duplicates_interactive(self, duplicates):
"""Remove duplicate files: interactive mode"""
if len(duplicates) != 0:
answer = input(f"Do you want to delete these {duplicates} images? Y/n: ")
if answer.strip().lower() == 'y':
if(answer.strip().lower() == 'y'):
self.remove_duplicates(duplicates)
self.log.info('Duplicates images deleted successfully!')
self.logger.info(f'{duplicate} deleted successfully!')
else:
self.log.info("No duplicates found")
self.logger.info("No duplicates found")
def diff(self, hash1, hash2):
return np.count_nonzero(hash1 != hash2)
def similarity(self, img_diff):
"""Similarity rate in %"""
threshold_img = img_diff / (self.hash_size ** 2)
threshold_img = img_diff / (self.hash_size**2)
similarity_img = round((1 - threshold_img) * 100)
return similarity_img
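# A worked example of the similarity math above: with hash_size = 8 the
# average hash has 64 bits, so 6 differing bits give:
hash_size, img_diff = 8, 6
threshold_img = img_diff / (hash_size ** 2)  # 0.09375
print(round((1 - threshold_img) * 100))      # 91 (percent similar)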
def find_similar(self, image0, similarity=80):
"""
def find_similar(self, image, similarity=80):
'''
Find similar images
:returns: img_path generator
"""
hash1 = image0.get_hash()
'''
hash1 = ''
image = Image(image)
if image.is_image():
hash1 = image.get_hash()
if hash1 is None:
return
self.logger.info(f'Finding similar images to {image}')
self.log.info(f"Finding similar images to {image0.img_path}")
threshold = 1 - similarity/100
diff_limit = int(threshold*(self.hash_size**2))
threshold = 1 - similarity / 100
diff_limit = int(threshold * (self.hash_size ** 2))
for image in self.images:
if not image.img_path.is_file():
continue
if image.img_path == image0.img_path:
for img_path in self.get_images():
if img_path == image:
continue
hash2 = image.get_hash()
# Be sure that the hash is not None
if hash2 is None:
continue
img_diff = self.diff(hash1, hash2)
if img_diff <= diff_limit:
similarity_img = self.similarity(img_diff)
self.log.info(
f"{image.img_path} image found {similarity_img}% similar to {image0.img_path}"
)
yield image.img_path
self.logger.info(f'{img_path} image found {similarity_img}% similar to {image}')
yield img_path


@ -1,61 +1,16 @@
"""Logging module"""
import logging
def get_logger(verbose, debug):
if debug:
level = logging.DEBUG
elif verbose:
level = logging.INFO
else:
level = logging.WARNING
def get_logger(name, level=30):
"""Get logger"""
logger = logging.getLogger(name)
logger.setLevel(level)
logging.basicConfig(format='%(levelname)s:%(message)s', level=level)
logging.getLogger('asyncio').setLevel(level)
logger = logging.getLogger('ordigi')
logger.level = level
return logger
def log_format(level):
if level > 10:
return '%(levelname)s:%(message)s'
return '%(levelname)s:%(name)s:%(message)s'
def set_formatter(handler, level):
"""create formatter and add it to the handlers"""
formatter = logging.Formatter(log_format(level))
handler.setFormatter(formatter)
def console(logger, level=30):
"""create console handler with a higher log level"""
logger.setLevel(level)
handler = logging.StreamHandler()
handler.setLevel(level)
set_formatter(handler, level)
# add the handlers to logger
logger.addHandler(handler)
def file_logger(logger, file, level=30):
"""create file handler that logs debug and higher level messages"""
logger.setLevel(level)
handler = logging.FileHandler(file)
handler.setLevel(level)
set_formatter(handler, level)
# add the handlers to logger
logger.addHandler(handler)
def get_level(quiet=False, verbose=False, debug=False, num=None):
"""Return int logging level from command line args"""
if num and num.isnumeric():
return int(num)
if debug:
return int(logging.getLevelName('DEBUG'))
if verbose:
return int(logging.getLevelName('INFO'))
if quiet:
return int(logging.getLevelName('ERROR'))
return int(logging.getLevelName('WARNING'))
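# A minimal usage sketch of the helpers above, as a command-line entry point
# might wire them up:
level = get_level(verbose=True)   # 20, i.e. logging.INFO
logger = get_logger('ordigi', level)
console(logger, level)            # attach a stream handler with a formatter
logger.info('sorting files...')   # prints 'INFO:sorting files...'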


@ -1,48 +1,60 @@
"""
Media :class:`Media` class to get file metadata
"""
import logging
import mimetypes
import os
import six
# load modules
from dateutil.parser import parse
import re
import sys
from dateutil import parser
import inquirer
from ordigi import LOG
from ordigi.exiftool import ExifTool, ExifToolCaching
from ordigi import utils
from ordigi import request
class Media():
class ExifMetadata:
"""The media class for all media objects.
def __init__(self, file_path, ignore_tags=None):
:param str file_path: The fully qualified path to the media file.
"""
d_coordinates = {
'latitude': 'latitude_ref',
'longitude': 'longitude_ref'
}
PHOTO = ('arw', 'cr2', 'dng', 'gif', 'heic', 'jpeg', 'jpg', 'nef', 'png', 'rw2')
AUDIO = ('m4a',)
VIDEO = ('avi', 'm4v', 'mov', 'mp4', 'mpg', 'mpeg', '3gp', 'mts')
extensions = PHOTO + AUDIO + VIDEO
def __init__(self, file_path, ignore_tags=set(), logger=logging.getLogger()):
self.file_path = file_path
if ignore_tags is None:
ignore_tags = set()
self.ignore_tags = ignore_tags
self.log = LOG.getChild(self.__class__.__name__)
self.tags_keys = self.get_tags()
self.exif_metadata = None
self.metadata = None
self.logger = logger
def get_tags(self) -> dict:
"""Get exif tags groups in dict"""
def get_tags(self):
tags_keys = {}
tags_keys['date_original'] = [
'EXIF:DateTimeOriginal',
'H264:DateTimeOriginal',
'QuickTime:ContentCreateDate',
'QuickTime:ContentCreateDate'
]
tags_keys['date_created'] = [
'EXIF:CreateDate',
'QuickTime:CreationDate',
'QuickTime:CreateDate',
'QuickTime:CreationDate-und-US',
'QuickTime:MediaCreateDate',
'QuickTime:MediaCreateDate'
]
tags_keys['date_modified'] = [
'EXIF:ModifyDate',
'QuickTime:ModifyDate',
]
tags_keys['file_modify_date'] = [
'File:FileModifyDate',
'QuickTime:ModifyDate'
]
tags_keys['camera_make'] = ['EXIF:Make', 'QuickTime:Make']
tags_keys['camera_model'] = ['EXIF:Model', 'QuickTime:Model']
@ -52,30 +64,75 @@ class ExifMetadata:
'EXIF:GPSLatitude',
'XMP:GPSLatitude',
# 'QuickTime:GPSLatitude',
'Composite:GPSLatitude',
'Composite:GPSLatitude'
]
tags_keys['longitude'] = [
'EXIF:GPSLongitude',
'XMP:GPSLongitude',
# 'QuickTime:GPSLongitude',
'Composite:GPSLongitude',
'Composite:GPSLongitude'
]
tags_keys['latitude_ref'] = ['EXIF:GPSLatitudeRef']
tags_keys['longitude_ref'] = ['EXIF:GPSLongitudeRef']
tags_keys['original_name'] = ['EXIF:OriginalFileName', 'XMP:OriginalFileName']
tags_keys['original_name'] = ['XMP:OriginalFileName']
# Remove ignored tag from list
for tag_regex in self.ignore_tags:
ignored_tags = set()
for key, tags in tags_keys.items():
for i, tag in enumerate(tags):
for n, tag in enumerate(tags):
if re.match(tag_regex, tag):
del tags_keys[key][i]
del(tags_keys[key][n])
return tags_keys
def get_date_format(self, value):
def _del_ignored_tags(self, exif_metadata):
for tag_regex in self.ignore_tags:
ignored_tags = set()
for tag in exif_metadata:
if re.search(tag_regex, tag) is not None:
ignored_tags.add(tag)
for ignored_tag in ignored_tags:
del exif_metadata[ignored_tag]
def get_mimetype(self):
"""Get the mimetype of the file.
:returns: str or None
"""
Formatting date attribute.
mimetype = mimetypes.guess_type(self.file_path)
if(mimetype is None):
return None
return mimetype[0]
def _get_key_values(self, key):
"""Get the first value of a tag set
:returns: str or None if no exif tag
"""
if self.exif_metadata is None:
return None
for tag in self.tags_keys[key]:
if tag in self.exif_metadata:
yield self.exif_metadata[tag]
def get_value(self, tag):
"""Get given value from EXIF.
:returns: str or None
"""
exiftool_attributes = self.get_exiftool_attributes()
if exiftool_attributes is None:
return None
if(tag not in exiftool_attributes):
return None
return exiftool_attributes[tag]
def get_date_format(self, value):
"""Formate date attribute.
:returns: datetime object or None
"""
# We need to parse a string to datetime format.
@ -86,65 +143,15 @@ class ExifMetadata:
try:
# correct a badly formatted date
regex = re.compile(r'(\d{4}):(\d{2}):(\d{2})[-_ .]')
if re.match(regex, value):
value = re.sub(regex, r'\g<1>-\g<2>-\g<3> ', value)
else:
regex = re.compile(r'(\d{4})(\d{2})(\d{2})[-_ .]?(\d{2})?(\d{2})?(\d{2})?')
if re.match(regex, value):
value = re.sub(regex, r'\g<1>-\g<2>-\g<3> \g<4>:\g<5>:\g<6>', value)
return parser.parse(value)
except BaseException or parser._parser.ParserError as e:
self.log.warning(e.args, value)
regex = re.compile(r'(\d{4}):(\d{2}):(\d{2})')
if(re.match(regex , value) is not None): # noqa
value = re.sub(regex , r'\g<1>-\g<2>-\g<3>', value)
return parse(value)
except BaseException or dateutil.parser._parser.ParserError as e:
self.logger.error(e, value)
import ipdb; ipdb.set_trace()
return None
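For reference, a standalone sketch of the same normalization step; normalize_exif_date is an illustrative name, not part of the module:

from dateutil import parser
import re

def normalize_exif_date(value):
    # EXIF dates like '2021:03:12 08:12:35' may not parse as intended,
    # so rewrite the date part with dashes before parsing
    regex = re.compile(r'(\d{4}):(\d{2}):(\d{2})[-_ .]')
    if re.match(regex, value):
        value = re.sub(regex, r'\g<1>-\g<2>-\g<3> ', value)
    return parser.parse(value)

print(normalize_exif_date('2021:03:12 08:12:35'))  # 2021-03-12 08:12:35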
class ReadExif(ExifMetadata):
"""Read exif metadata to file"""
def __init__(
self,
file_path,
exif_metadata=None,
cache=True,
ignore_tags=None,
):
super().__init__(file_path, ignore_tags)
# Options
self.log = LOG.getChild(self.__class__.__name__)
self.cache = cache
if exif_metadata:
self.exif_metadata = exif_metadata
elif self.cache:
self.exif_metadata = self.get_exif_metadata_caching()
else:
self.exif_metadata = self.get_exif_metadata()
def get_exif_metadata(self):
"""Get metadata from exiftool without caching."""
return ExifTool(self.file_path).asdict()
def get_exif_metadata_caching(self):
"""Get metadata from exiftool with caching."""
return ExifToolCaching(self.file_path).asdict()
def get_key_values(self, key):
"""
Get tags values of a key
:yields: str for each exif tag of the key present in metadata
"""
if self.exif_metadata is None:
return None
for tag in self.tags_keys[key]:
if tag in self.exif_metadata:
yield self.exif_metadata[tag]
def get_coordinates(self, key, value):
"""Get latitude or longitude value
@ -179,52 +186,86 @@ class ReadExif(ExifMetadata):
direction_multiplier = -1.0
return this_coordinate * direction_multiplier
return None
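The direction multiplier above amounts to negating coordinates whose reference is South or West; a self-contained sketch (to_signed_degrees is a hypothetical helper):

def to_signed_degrees(coordinate, ref):
    # EXIF stores positive degrees plus a hemisphere reference;
    # 'S' and 'W' map to negative signed values
    if ref in ('S', 'W'):
        return coordinate * -1.0
    return coordinate

assert to_signed_degrees(45.757, 'N') == 45.757
assert to_signed_degrees(4.832, 'W') == -4.832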
class WriteExif(ExifMetadata):
"""Write exif metadata to file"""
def __init__(
self,
file_path,
metadata,
ignore_tags=None,
):
super().__init__(file_path, ignore_tags)
self.metadata = metadata
self.log = LOG.getChild(self.__class__.__name__)
def set_value(self, tag, value):
"""Set value of a tag.
:returns: value (str)
"""
# TODO overwrite mode check if fail
return ExifTool(self.file_path, overwrite=True).setvalue(tag, value)
def set_key_values(self, key, value):
"""Set tags values for given key"""
status = True
for tag in self.tags_keys[key]:
if not self.set_value(tag, value):
status = False
return status
def get_metadata(self):
"""Get a dictionary of metadata from exif.
All keys will be present and have a value of None if not obtained.
:returns: dict
"""
# Get metadata from exiftool.
self.exif_metadata = ExifToolCaching(self.file_path, logger=self.logger).asdict()
# TODO to be removed
self.metadata = {}
# Retrieve selected metadata to dict
if not self.exif_metadata:
return self.metadata
for key in self.tags_keys:
formated_data = None
for value in self._get_key_values(key):
if 'date' in key:
formated_data = self.get_date_format(value)
elif key in ('latitude', 'longitude'):
formated_data = self.get_coordinates(key, value)
else:
if value is not None and value != '':
formated_data = value
else:
formated_data = None
if formated_data:
# Use this data and break
break
self.metadata[key] = formated_data
self.metadata['base_name'] = os.path.basename(os.path.splitext(self.file_path)[0])
self.metadata['ext'] = os.path.splitext(self.file_path)[1][1:]
self.metadata['directory_path'] = os.path.dirname(self.file_path)
return self.metadata
def has_exif_data(self):
"""Check if file has metadata, date original"""
if not self.metadata:
return False
if 'date_original' in self.metadata:
if self.metadata['date_original'] is not None:
return True
return False
@classmethod
def get_class_by_file(cls, _file, classes, ignore_tags=set(), logger=logging.getLogger()):
"""Static method to get a media object by file.
"""
Set the date/time a photo was taken.
if not os.path.isfile(_file):
return None
extension = os.path.splitext(_file)[1][1:].lower()
if len(extension) > 0:
for i in classes:
if(extension in i.extensions):
return i(_file, ignore_tags=ignore_tags, logger=logger)
return Media(_file, ignore_tags=ignore_tags, logger=logger)
def set_date_media(self, time):
"""Set the date/time a photo was taken.
:param datetime time: datetime object of when the photo was taken
:returns: bool
"""
if time is None:
return False
formatted_time = time.strftime('%Y:%m:%d %H:%M:%S')
status = self.set_value('date_original', formatted_time)
if not status:
# exif attribute date_original does not exist
status = self.set_value('date_created', formatted_time)
@ -241,7 +282,7 @@ class WriteExif(ExifMetadata):
status.append(self.set_value('latitude', latitude))
if self.metadata['longitude_ref']:
longitude = abs(longitude)
if longitude > 0:
status.append(self.set_value('latitude_ref', 'E'))
@ -252,519 +293,49 @@ class WriteExif(ExifMetadata):
if all(status):
return True
return False
def set_album_from_folder(self):
"""Set the album attribute based on the leaf folder name
:returns: bool
"""
# TODO use tag key
return self.set_value('Album', self.file_path.parent.name)
def get_all_subclasses(cls=None):
"""Module method to get all subclasses of Media.
"""
subclasses = set()
this_class = Media
if cls is not None:
this_class = cls
subclasses.add(this_class)
this_class_subclasses = this_class.__subclasses__()
for child_class in this_class_subclasses:
subclasses.update(get_all_subclasses(child_class))
return subclasses
class Media(ReadExif):
"""
Extract metadata from exiftool and sort it into a dict structure
"""
d_coordinates = {'latitude': 'latitude_ref', 'longitude': 'longitude_ref'}
def __init__(
self,
file_path,
src_dir,
album_from_folder=False,
ignore_tags=None,
interactive=False,
cache=True,
checksum=None,
use_date_filename=False,
use_file_dates=False,
):
super().__init__(
file_path,
cache=cache,
ignore_tags=ignore_tags,
)
self.src_dir = src_dir
self.album_from_folder = album_from_folder
self.cache = cache
if checksum:
self.checksum = checksum
else:
self.checksum = utils.checksum(file_path)
self.interactive = interactive
self.log = LOG.getChild(self.__class__.__name__)
self.metadata = None
self.use_date_filename = use_date_filename
self.use_file_dates = use_file_dates
self.theme = request.load_theme()
self.loc_keys = (
'latitude',
'longitude',
'location',
'latitude_ref',
'longitude_ref',
'city',
'state',
'country',
)
def get_mimetype(self):
"""
Get the mimetype of the file.
:returns: str or None
"""
# TODO add to metadata
mimetype, _ = mimetypes.guess_type(self.file_path)
return mimetype
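The unpacking matters because mimetypes.guess_type always returns a (type, encoding) tuple whose first element may be None; a quick check:

import mimetypes

print(mimetypes.guess_type('photo.jpg'))   # ('image/jpeg', None)
print(mimetypes.guess_type('photo.xyz'))   # (None, None)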
def _get_date_media_interactive(self, choices, default):
print(f"Date conflict for file: {self.file_path}")
choices_list = [
inquirer.List(
'date_list',
message="Choice appropriate original date",
choices=choices,
default=default,
),
]
answers = inquirer.prompt(choices_list, theme=self.theme)
if not answers:
sys.exit()
if not answers['date_list']:
answer = self.prompt.text("date")
return self.get_date_format(answer)
return answers['date_list']
def get_date_media(self):
'''
Get the date taken from self.metadata or filename
:returns: datetime or None.
'''
if self.metadata is None:
return None
filename = self.metadata['filename']
stem = os.path.splitext(filename)[0]
if self.metadata['original_name']:
date_filename, _, _ = utils.get_date_from_string(self.metadata['original_name'])
else:
date_filename, _, _ = utils.get_date_from_string(stem)
self.log.debug(f'date_filename: {date_filename}')
date_original = self.metadata['date_original']
date_created = self.metadata['date_created']
date_modified = self.metadata['date_modified']
file_modify_date = self.metadata['file_modify_date']
if self.metadata['date_original']:
if date_filename and date_filename != date_original:
timedelta = abs(date_original - date_filename)
if timedelta.total_seconds() > 60:
self.log.warning(
f"{filename} time mark is different from {date_original}"
)
if self.interactive:
# Ask for keep date taken, filename time, or neither
choices = [
(f"date original:'{date_original}'", date_original),
(f"date filename:'{date_filename}'", date_filename),
("custom", None),
]
default = f'{date_original}'
return self._get_date_media_interactive(choices, default)
return self.metadata['date_original']
self.log.warning(f"could not find date original for {self.file_path}")
if self.use_date_filename and date_filename:
self.log.info(
f"use date from filename:{date_filename} for {self.file_path}"
)
if date_created and date_filename > date_created:
timedelta = abs(date_created - date_filename)
if timedelta.total_seconds() > 60:
self.log.warning(
f"{filename} time mark is more recent than {date_created}"
)
return date_created
if self.interactive:
choices = [
(f"date filename:'{date_filename}'", date_filename),
(f"date created:'{date_created}'", date_created),
("custom", None),
]
default = date_filename
return self._get_date_media_interactive(choices, default)
return date_filename
if date_created:
self.log.warning(
f"use date created:{date_created} for {self.file_path}"
)
return date_created
if date_modified:
self.log.warning(
f"use date modified:{date_modified} for {self.file_path}"
)
return date_modified
if self.use_file_dates:
if file_modify_date:
self.log.warning(
f"use date modified:{file_modify_date} for {self.file_path}"
)
return file_modify_date
elif self.interactive:
choices = []
if date_filename:
choices.append((f"date filename:'{date_filename}'", date_filename))
if date_created:
choices.append((f"date created:'{date_created}'", date_created))
if date_modified:
choices.append((f"date modified:'{date_modified}'", date_modified))
if file_modify_date:
choices.append(
(f"date modified:'{file_modify_date}'", file_modify_date)
)
choices.append(("custom", None))
default = date_filename
return self._get_date_media_interactive(choices, default)
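Stripped of the interactive prompts, warnings and the filename-versus-created sanity check, the fallback order above reduces to the following sketch (pick_date and the flat meta dict are illustrative, not module names):

def pick_date(meta, use_date_filename=False, use_file_dates=False):
    # Priority: embedded original date, then a date parsed from the
    # filename (if enabled), then creation and modification dates
    if meta.get('date_original'):
        return meta['date_original']
    if use_date_filename and meta.get('date_filename'):
        return meta['date_filename']
    if meta.get('date_created'):
        return meta['date_created']
    if meta.get('date_modified'):
        return meta['date_modified']
    if use_file_dates:
        return meta.get('file_modify_date')
    return None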
def _set_album(self, album, folder):
print(f"Metadata conflict for file: {self.file_path}")
choices_list = [
inquirer.List(
'album',
message=f"Exif album is already set to {album}, choices",
choices=[
(f"album:'{album}'", album),
(f"folder:'{folder}'", folder),
("custom", None),
],
default=f'{album}',
),
]
answers = inquirer.prompt(choices_list, theme=self.theme)
if not answers:
sys.exit()
if not answers['album']:
return self.input.text("album")
return answers['album']
def _set_metadata_from_exif(self):
"""
Get selected metadata from exif to dict structure
"""
if not self.exif_metadata:
return
for key in self.tags_keys:
formated_data = None
for value in self.get_key_values(key):
if 'date' in key:
formated_data = self.get_date_format(value)
elif key in ('latitude', 'longitude'):
formated_data = self.get_coordinates(key, value)
else:
if value is not None and value != '':
formated_data = value
else:
formated_data = None
if formated_data:
# Use this data and break
break
self.metadata[key] = formated_data
def _set_metadata_from_db(self, db, relpath):
# Get metadata from db
formated_data = None
for key in self.tags_keys:
if key in (
'latitude',
'longitude',
'latitude_ref',
'longitude_ref',
'file_path',
):
continue
label = utils.snake2camel(key)
value = db.get_metadata(relpath, label)
if 'date' in key:
formated_data = self.get_date_format(value)
else:
formated_data = value
self.metadata[key] = formated_data
for key in 'src_dir', 'subdirs', 'filename':
label = utils.snake2camel(key)
formated_data = db.get_metadata(relpath, label)
self.metadata[key] = formated_data
return db.get_metadata(relpath, 'LocationId')
def set_location_from_db(self, location_id, db):
self.metadata['location_id'] = location_id
if location_id:
for key in self.loc_keys:
# use str to convert non string format data like latitude and
# longitude
self.metadata[key] = str(
db.get_location_data(location_id, utils.snake2camel(key))
)
else:
for key in self.loc_keys:
self.metadata[key] = None
def set_location_from_coordinates(self, loc):
self.metadata['location_id'] = None
if loc:
place_name = loc.place_name(
self.metadata['latitude'], self.metadata['longitude']
)
self.log.debug("location: {place_name['default']}")
for key in ('city', 'state', 'country', 'location'):
# mask = 'city'
# place_name = {'default': u'Sunnyvale', 'city-random': u'Sunnyvale'}
if key in place_name:
self.metadata[key] = place_name[key]
elif key == 'location':
self.metadata[key] = place_name['default']
else:
self.metadata[key] = None
else:
for key in self.loc_keys:
self.metadata[key] = None
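As the inline comment suggests, the place_name lookup returns a dict keyed by address component plus a 'default' entry; the values below are illustrative:

place_name = {
    'default': 'Sunnyvale',
    'city': 'Sunnyvale',
    'state': 'California',
    'country': 'United States',
}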
def _set_album_from_folder(self):
album = self.metadata['album']
folder = self.file_path.parent.name
if album and album != '':
if self.interactive:
# _set_album resolves to the existing album, the folder name or a custom value
self.metadata['album'] = self._set_album(album, folder)
if not album or album == '':
self.metadata['album'] = folder
def get_metadata(self, root, loc=None, db=None, cache=False):
"""
Get a dictionary of metadata from exif.
All keys will be present and have a value of None if not obtained.
"""
self.metadata = {}
self.metadata['checksum'] = self.checksum
db_checksum = False
location_id = None
if cache and db and str(self.file_path).startswith(str(root)):
relpath = os.path.relpath(self.file_path, root)
db_checksum = db.get_checksum(relpath)
if db_checksum:
location_id = self._set_metadata_from_db(db, relpath)
self.set_location_from_db(location_id, db)
else:
self.metadata['src_dir'] = str(self.src_dir)
self.metadata['subdirs'] = str(
self.file_path.relative_to(self.src_dir).parent
)
self.metadata['filename'] = self.file_path.name
self._set_metadata_from_exif()
self.set_location_from_coordinates(loc)
self.metadata['date_media'] = self.get_date_media()
self.metadata['location_id'] = location_id
if self.album_from_folder:
self._set_album_from_folder()
def has_exif_data(self):
"""Check if file has metadata, date original"""
if not self.metadata:
return False
if 'date_original' in self.metadata:
if self.metadata['date_original']:
return True
return False
def get_media_class(_file, ignore_tags=set(), logger=logging.getLogger()):
if not os.path.exists(_file):
logger.error(f'Could not find {_file}')
return False
media = Media.get_class_by_file(_file, get_all_subclasses(),
ignore_tags=ignore_tags, logger=logger)
if not media:
logger.error(f'File {_file} is not supported')
return False
return media
class Medias:
"""
Extract metadata from exiftool for given paths and sort it into a dict structure
"""
PHOTO = ('arw', 'cr2', 'dng', 'gif', 'heic', 'jpeg', 'jpg', 'nef', 'png', 'rw2')
AUDIO = ('m4a',)
VIDEO = ('avi', 'm4v', 'mov', 'mp4', 'mpg', 'mpeg', '3gp', 'mts')
extensions = PHOTO + AUDIO + VIDEO
def __init__(
self,
paths,
root,
exif_options,
checksums=None,
db=None,
interactive=False,
):
# Modules
self.db = db
self.paths = paths
# Arguments
self.root = root
# Options
if checksums:
self.checksums = checksums
else:
self.checksums = {}
self.exif_opt = exif_options
self.ignore_tags = self.exif_opt['ignore_tags']
self.interactive = interactive
self.log = LOG.getChild(self.__class__.__name__)
# Attributes
# List to store medias datas
self.datas = {}
self.theme = request.load_theme()
def get_media(self, file_path, src_dir, checksum=None):
media = Media(
file_path,
src_dir,
self.exif_opt['album_from_folder'],
self.exif_opt['ignore_tags'],
self.interactive,
self.exif_opt['cache'],
checksum,
self.exif_opt['use_date_filename'],
self.exif_opt['use_file_dates'],
)
return media
def get_media_data(self, file_path, src_dir, loc=None):
"""Get media class instance with metadata"""
if self.checksums and file_path in self.checksums:
checksum = self.checksums[file_path]
else:
checksum = None
media = self.get_media(file_path, src_dir, checksum)
media.get_metadata(
self.root, loc, self.db.sqlite, self.exif_opt['cache']
)
return media
def get_metadata(self, src_path, src_dir, loc=None):
"""Get metadata"""
return self.get_media_data(src_path, src_dir, loc).metadata
def get_paths(self, src_dirs, imp=False):
"""Get paths"""
for src_dir in src_dirs:
src_dir = self.paths.check(src_dir)
if src_dir.is_file():
yield src_dir.parent, src_dir
continue
paths = self.paths.get_paths_list(src_dir)
# Get medias and src_dirs
for src_path in paths:
if self.root not in src_path.parents:
if not imp:
self.log.error(f"""{src_path} not in {self.root}
collection, use `ordigi import`""")
sys.exit(1)
yield src_dir, src_path
def get_medias_datas(self, src_dirs, imp=False, loc=None):
"""Get medias datas"""
for src_dir, src_path in self.get_paths(src_dirs, imp=imp):
# Get file metadata
media = self.get_media_data(src_path, src_dir, loc=loc)
yield src_path, media
def get_metadatas(self, src_dirs, imp=False, loc=None):
"""Get medias data"""
for src_dir, src_path in self.get_paths(src_dirs, imp=imp):
# Get file metadata
metadata = self.get_metadata(src_path, src_dir, loc=loc)
yield src_path, metadata
def update_exif_data(self, metadata, imp=False):
file_path = self.root / metadata['file_path']
exif = WriteExif(
file_path,
metadata,
ignore_tags=self.exif_opt['ignore_tags'],
)
updated = False
if imp and metadata['original_name'] in (None, ''):
exif.set_key_values('original_name', metadata['filename'])
updated = True
if self.exif_opt['album_from_folder']:
exif.set_album_from_folder()
album = metadata['album']
if album and album != '':
exif.set_value('album', album)
updated = True
if (
self.exif_opt['fill_date_original']
and metadata['date_original'] in (None, '')
):
exif.set_key_values('date_original', metadata['date_media'])
updated = True
if updated:
return True
return False


@ -1,79 +0,0 @@
import inquirer
from blessed import Terminal
from colorama import init,Fore,Style,Back
term = Terminal()
# TODO allow exit from inquierer prompt
# TODO fix 'opening_prompt_color': term.yellow,
def load_theme():
"""
Customize inquirer
source:https://github.com/magmax/python-inquirer/blob/master/inquirer/themes.py
"""
custom_theme = {
'Question': {
'brackets_color': term.dodgerblue4,
'default_color': term.yellow,
},
'Checkbox': {
'selection_icon': '',
'selected_icon': '',
'unselected_icon': '',
'selection_color': term.bold_on_dodgerblue4,
'selected_color': term.dodgerblue2,
'unselected_color': term.yellow,
},
'List': {
'selection_color': term.bold_on_dodgerblue4,
'selection_cursor': '',
'unselected_color': term.yellow,
},
}
return inquirer.themes.load_theme_from_dict(custom_theme)
class Input():
def __init__(self):
init()
def text(self, message):
return input(f'{Fore.BLUE}[{Fore.YELLOW}?{Fore.BLUE}]{Fore.WHITE} {message}: ')
# def edit_prompt(self, key: str, value: str) -> str:
# print(f"Date conflict for file: {self.file_path}")
# choices_list = [
# inquirer.List(
# 'edit',
# message=f"Edit '{key}' metadata",
# choices = [
# (f"{key}: '{value}'", value),
# ("custom", None),
# ],
# default=value,
# ),
# ]
# answers = inquirer.prompt(choices_list, theme=self.theme)
# if not answers['edit']:
# prompt = [
# inquirer.Text('edit', message="value"),
# ]
# answers = inquirer.prompt(prompt, theme=self.theme)
# return self.get_date_format(answers['edit'])
# else:
# return answers['date_list']
# choices = [
# (f"date original:'{date_original}'", date_original),
# (f"date filename:'{date_filename}'", date_filename),
# ("custom", None),
# ]
# default = f'{date_original}'
# return self._get_date_media_interactive(choices, default)


@ -1,99 +1,40 @@
# import pandas as pd
from tabulate import tabulate
class Tables:
"""Create table and display result in Pandas DataFrame"""
def __init__(self, actions):
self.actions = actions
self.table = []
self.columns = ['action', 'file_path', 'dest_path']
# self.df = self.dataframe()
def append(self, action, file_path=None, dest_path=None):
row = (action, file_path, dest_path)
self.table.append(row)
def sum(self, action=None):
if not action:
return len(self.table)
count = 0
for row in self.table:
if row[0] == action:
count += 1
return count
# def dataframe(self):
# return pd.DataFrame(self.table, columns=self.columns)
def tabulate(self):
errors_headers = self.columns
return tabulate(self.table, headers=errors_headers)
class Summary(object):
def __init__(self):
self.records = []
self.success = 0
self.error = 0
self.error_items = []
def append(self, row):
id, status = row
if status:
self.success += 1
else:
self.error += 1
self.error_items.append(id)
def write(self):
if self.error > 0:
error_headers = ["File"]
error_result = []
for id in self.error_items:
error_result.append([id])
print('Errors details:')
print(tabulate(error_result, headers=error_headers))
print("\n")
headers = ["Metric", "Count"]
result = [
["Success", self.success],
["Error", self.error],
]
print('Summary:')
print(tabulate(result, tablefmt="plain"))
class Summary:
"""Result summary of ordigi program call"""
def __init__(self, root):
self.actions = (
'check',
'import',
'remove',
'sort',
'update',
)
# Set labels
self.state = ['success', 'errors']
self.root = root
self.success_table = Tables(self.actions)
self.errors_table = Tables(self.actions)
self.errors = 0
def append(self, action, success, file_path=None, dest_path=None):
if action:
if success:
self.success_table.append(action, file_path, dest_path)
else:
self.errors_table.append(action, file_path, dest_path)
if not success:
self.errors += 1
def print(self):
"""Print summary"""
print()
for action in self.actions:
nb = self.success_table.sum(action)
if nb != 0:
if action == 'check':
print(f"SUMMARY: {nb} files checked in {self.root}.")
elif action == 'import':
print(f"SUMMARY: {nb} files imported into {self.root}.")
elif action == 'sort':
print(f"SUMMARY: {nb} files sorted inside {self.root}.")
elif action == 'remove_excluded':
print(f"SUMMARY: {nb} files deleted in {self.root}.")
elif action == 'remove_empty_folders':
print(f"SUMMARY: {nb} empty folders removed in {self.root}.")
elif action == 'update':
print(f"SUMMARY: {nb} files updated in {self.root} database.")
success = self.success_table.sum()
if not success and not self.errors:
print(f"SUMMARY: no action done in {self.root}.")
errors = self.errors_table.sum()
if errors:
print()
print(f"ERROR: {errors} errors reported for files:")
print(self.errors_table.tabulate())
elif self.errors:
print(f"ERROR: {self.errors} errors reported.")


@ -1,189 +0,0 @@
from math import radians, cos, sqrt
from datetime import datetime
import hashlib
import os
import platform
import re
import subprocess
def checksum(file_path, blocksize=65536):
"""Create a hash value for the given file.
See http://stackoverflow.com/a/3431835/1318758.
:param str file_path: Path to the file to create a hash for.
:param int blocksize: Read blocks of this size from the file when
creating the hash.
:returns: str or None
"""
hasher = hashlib.sha256()
with open(file_path, 'rb') as file:
buf = file.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = file.read(blocksize)
return hasher.hexdigest()
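Typical use is duplicate detection by content; the paths here are illustrative:

if checksum('IMG_0001.jpg') == checksum('IMG_0001_copy.jpg'):
    print('files have identical content')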
def distance_between_two_points(lat1, lon1, lat2, lon2):
"""Return distance between two points"""
# From http://stackoverflow.com/questions/15736995/how-can-i-quickly-estimate-the-distance-between-two-latitude-longitude-points # noqa
# convert decimal degrees to radians
lat1, lon1, lat2, lon2 = list(map(radians, [lat1, lon1, lat2, lon2]))
rad = 6371000 # radius of the earth in m
x = (lon2 - lon1) * cos(0.5 * (lat2 + lat1))
y = lat2 - lat1
return rad * sqrt(x * x + y * y)
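A quick sanity check of this equirectangular approximation, using illustrative coordinates for Paris and Lyon (the great-circle distance is roughly 392 km):

d = distance_between_two_points(48.8566, 2.3522, 45.7640, 4.8357)
print(round(d / 1000))  # ~392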
def empty_dir(dir_path):
return not next(os.scandir(dir_path), None)
def filename_filter(filename):
"""
Take a string and return a valid filename constructed from the string.
"""
blacklist = '/\\:*"<>|'
if filename is None:
return filename
# Remove blacklisted chars.
for char in blacklist:
filename = filename.replace(char, '')
return filename
def get_date_regex(user_regex=None):
"""Return date regex generator"""
if user_regex:
regex = {'a': re.compile(user_regex)}
else:
regex = {
# regex to match date format type %Y%m%d, %y%m%d, %d%m%Y,
# etc...
'a': re.compile(
r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_.]?(?P<hour>\d{2})[-_.]?(?P<minute>\d{2})[-_.]?(?P<second>\d{2})([-_./ ])'
),
'b': re.compile(
r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
),
# not very accurate
'c': re.compile(
r'[-_./ ](?P<year>\d{2})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
),
'd': re.compile(
r'[-_./ ](?P<day>\d{2})[-_.](?P<month>\d{2})[-_.](?P<year>\d{4})([-_./ ])'
),
}
return regex
DATE_REGEX = get_date_regex()
def get_date_from_string(string):
"""Retrieve date stamp from string"""
# If missing datetime from EXIF data check if filename is in datetime format.
# For this use a user provided regex if possible.
# Otherwise assume a filename such as IMG_20160915_123456.jpg as default.
matches = []
sep = ''
for i, regex in DATE_REGEX.items():
match = re.findall(regex, string)
if match != []:
sep = match[0][3]
if i == 'c':
match = [('20' + match[0][0], match[0][1], match[0][2])]
elif i == 'd':
# reorder items
match = [(match[0][2], match[0][1], match[0][0])]
else:
match = [(match[0][0], match[0][1], match[0][2])]
if len(match) != 1:
# The time string is not unique
continue
matches.append((match[0], regex))
# We want only the first match for the moment
break
# check if there is only one result
if len(set(matches)) == 1:
try:
# Convert str to int
date_object = tuple(map(int, matches[0][0]))
date = datetime(*date_object)
except (KeyError, ValueError):
return None, matches[0][1], sep
return date, matches[0][1], sep
return None, None, sep
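For instance, with an illustrative filename matching the second pattern:

date, regex, sep = get_date_from_string('photo_2016-09-15_paris.jpg')
print(date)  # datetime.datetime(2016, 9, 15, 0, 0)
print(sep)   # '_'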
def match_date_regex(regex, value):
if re.match(regex, value) is not None:
return re.sub(regex, r'\g<1>-\g<2>-\g<3>-', value)
return value
def split_part(dedup_regex, path_part, items=None):
"""
Split part from regex
:returns: parts
"""
if not items:
items = []
regex = dedup_regex.pop()
parts = re.split(regex, path_part)
# Loop through parts, search for the matched regex part and proceed with
# the next regex for the other parts
for n, part in enumerate(parts):
if re.match(regex, part):
if part[0] in '-_ .':
if n > 0:
# move the separator to previous item
parts[n - 1] = parts[n - 1] + part[0]
items.append(part[1:])
else:
items.append(part)
elif dedup_regex:
# Others parts
items = split_part(dedup_regex, part, items)
else:
items.append(part)
return items
# Conversion functions
# source:https://rodic.fr/blog/camelcase-and-snake_case-strings-conversion-with-python/
def snake2camel(name):
return re.sub(r'(?:^|_)([a-z])', lambda x: x.group(1).upper(), name)
def camel2snake(name):
return name[0].lower() + re.sub(
r'(?!^)[A-Z]', lambda x: '_' + x.group(0).lower(), name[1:]
)
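These two helpers are what map metadata dict keys to database column labels and back, e.g.:

assert snake2camel('date_original') == 'DateOriginal'
assert camel2snake('DateOriginal') == 'date_original'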
def open_file(path):
if platform.system() == "Windows":
os.startfile(path)
elif platform.system() == "Darwin":
subprocess.Popen(["open", path])
else:
subprocess.Popen(["xdg-open", path])

package.json Normal file

@ -0,0 +1,23 @@
{
"name": "elodie",
"version": "1.0.0",
"description": "GUI for Elodie",
"main": "app/index.js",
"dependencies": {
"menubar": "^2.3.0"
},
"devDependencies": {},
"scripts": {
"test": "electron app.js"
},
"repository": {
"type": "git",
"url": "https://github.com/jmathai/elodie"
},
"author": "Jaisen Mathai",
"license": "ISC",
"bugs": {
"url": "https://github.com/jmathai/elodie/issues"
},
"homepage": "https://github.com/jmathai/elodie"
}


@ -1,6 +0,0 @@
[build-system]
requires = [
"setuptools>=42",
"wheel"
]
build-backend = "setuptools.build_meta"


@ -1,5 +1,5 @@
[pytest]
# addopts = --ignore=old_tests -s
addopts = --ignore=old_tests -s
# collect_ignore = ["old_test"]


@ -1,9 +1,9 @@
click
python-dateutil
geopy
imagehash
inquirer
configparser
tabulate
Pillow
# pyheif_pillow_opener
click==6.6
imagehash==4.2.1
requests==2.20.0
Send2Trash==1.3.0
configparser==3.5.0
tabulate==0.7.7
Pillow==8.0
pyheif_pillow_opener==0.1
six==1.9

run_tests.py Executable file

@ -0,0 +1,41 @@
#!/usr/bin/env python
import nose
import os
import shutil
import sys
import tempfile
if __name__ == "__main__":
# test_directory is what we pass nose.run for where to find tests
test_directory = os.path.abspath('tests')
# create a temporary directory to use for the application directory while running tests
temporary_application_directory = tempfile.mkdtemp('-elodie-tests')
os.environ['ELODIE_APPLICATION_DIRECTORY'] = temporary_application_directory
# copy config.ini-sample over to the test application directory
temporary_config_file_sample = '{}/config.ini-sample'.format(os.path.dirname(test_directory))
temporary_config_file = '{}/config.ini'.format(temporary_application_directory)
shutil.copy2(
temporary_config_file_sample,
temporary_config_file,
)
# read the sample config file and store contents to be replaced
with open(temporary_config_file_sample, 'r') as f:
config_contents = f.read()
# set the mapquest key in the temporary config file and write it to the temporary application directory
config_contents = config_contents.replace('your-api-key-goes-here', 'x8wQLqGhW7qK3sFpjYtVTogVtoMK0S8s')
with open(temporary_config_file, 'w+') as f:
f.write(config_contents)
test_argv = sys.argv
test_argv.append('--verbosity=2')
test_argv.append('-s')
result = nose.run(argv=test_argv)
if result:
sys.exit(0)
else:
sys.exit(1)



@ -1,39 +0,0 @@
[metadata]
# For upload use:
# name = example-pkg-YOUR-USERNAME-HERE
name = ordigi-pkg-local
version = 0.1.0
author = Cedric Leporcq
author_email = cedl38@gmail.com
description = Media organizer tools
long_description = file: README.md
long_description_content_type = text/markdown
# url =
# project_urls =
# Bug Tracker =
classifiers =
Development Status :: 3 - Alpha
Environment :: Console
Programming Language :: Python :: 3
License :: OSI Approved :: GPL version 3
Operating System :: OS Independent
[options]
# package_dir = ordigi
packages = find:
python_requires = >=3.6
[options.packages.find]
exclude =
tests
docs
[options.entry_points]
console_scripts =
ordigi = ordigi.cli:cli
[flake8]
[pycodestyle]
max-line-length = 88


@ -1,3 +0,0 @@
from setuptools import setup
setup()


@ -1,76 +1,49 @@
""" pytest test configuration """
from configparser import RawConfigParser
import os
from pathlib import Path, PurePath
import random
import pytest
from pathlib import Path
import shutil
import tempfile
from ordigi import config
from ordigi.exiftool import _ExifToolProc
ORDIGI_PATH = Path(__file__).parent.parent
@pytest.fixture(autouse=True)
def reset_singletons():
""" Need to clean up any ExifTool singletons between tests """
_ExifToolProc.instance = None
@pytest.fixture(scope="module")
def sample_files_paths(tmpdir_factory):
tmp_path = Path(tmpdir_factory.mktemp("ordigi-src-"))
path = Path(ORDIGI_PATH, 'samples/test_exif')
shutil.copytree(path, tmp_path / path.name)
paths = Path(tmp_path).glob('**/*')
file_paths = [x for x in paths if x.is_file()]
return tmp_path, file_paths
def copy_sample_files():
src_path = tempfile.mkdtemp(prefix='ordigi-src')
paths = Path(ORDIGI_PATH, 'samples/test_exif').glob('*')
file_paths = [x for x in paths if x.is_file()]
for file_path in file_paths:
source_path = Path(src_path, file_path.name)
shutil.copyfile(file_path, source_path)
return src_path, file_paths
def randomize_files(dest_dir):
# Get files randomly
for path, subdirs, files in os.walk(dest_dir):
if '.ordigi' in path:
continue
for name in files:
file_path = PurePath(path, name)
if bool(random.getrandbits(1)):
with open(file_path, 'wb') as fout:
fout.write(os.urandom(random.randrange(128, 2048)))
if bool(random.getrandbits(1)):
dest_path = PurePath(path, file_path.stem + '_1'+ file_path.suffix)
shutil.copyfile(file_path, dest_path)
def randomize_db(dest_dir):
# alterate database
file_path = Path(str(dest_dir), '.ordigi', 'collection.db')
with open(file_path, 'wb') as fout:
fout.write(os.urandom(random.randrange(128, 2048)))
@pytest.fixture(scope="module")
def conf_path():
conf_dir = tempfile.mkdtemp(prefix='ordigi-')
tmp_path = tempfile.mkdtemp(prefix='ordigi-')
conf = RawConfigParser()
conf['Path'] = {
'day_begins': '4',
'dirs_path':'%u<%Y-%m>/<city>|<city>-<%Y>/<folders[:1]>/<folder>',
'name':'<%Y-%m-%b-%H-%M-%S>-<basename>.%l<ext>'
'dirs_path':'%u{%Y-%m}/{city}|{city}-{%Y}/{folders[:1]}/{folder}',
'name':'{%Y-%m-%b-%H-%M-%S}-{basename}.%l{ext}'
}
conf['Geolocation'] = {
'geocoder': 'Nominatium'
}
conf_path = Path(conf_dir, "ordigi.conf")
with open(conf_path, 'w') as conf_file:
conf.write(conf_file)
conf_path = Path(tmp_path, "ordigi.conf")
config.write(conf_path, conf)
yield conf_path
shutil.rmtree(conf_dir)
shutil.rmtree(tmp_path)


@ -1,251 +0,0 @@
import shutil
from click.testing import CliRunner
from pathlib import Path
import pytest
import inquirer
from ordigi import cli
from ordigi.request import Input
CONTENT = "content"
ORDIGI_PATH = Path(__file__).parent.parent
def get_arg_options_list(arg_options):
arg_options_list = []
for opt, arg in arg_options:
arg_options_list.append(opt)
arg_options_list.append(arg)
return arg_options_list
class TestOrdigi:
@pytest.fixture(autouse=True)
def setup_class(cls, sample_files_paths):
cls.runner = CliRunner()
cls.src_path, cls.file_paths = sample_files_paths
cls.logger_options = ('--debug',)
cls.filter_options = (
('--ignore-tags', 'CreateDate'),
('--ext', 'jpg'),
('--glob', '*'),
)
cls.sort_options = (
'--album-from-folder',
'--fill-date-original',
'--path-format',
'--remove-duplicates',
'--use-date-filename',
'--use-file-dates',
)
def assert_cli(self, command, attributes, state=0):
result = self.runner.invoke(command, [*attributes])
assert result.exit_code == state, (command, attributes)
def assert_options(self, command, bool_options, arg_options, paths):
for bool_option in bool_options:
self.assert_cli(command, [bool_option, *paths])
for opt, arg in arg_options:
self.assert_cli(command, [opt, arg, *paths])
def assert_all_options(self, command, bool_options, arg_options, paths):
arg_options_list = get_arg_options_list(arg_options)
self.assert_cli(command, [
*bool_options, *arg_options_list, *paths,
])
def test_commands(self):
# Check if fail if path not exist
commands = [
cli._check,
cli._clean,
cli._compare,
cli._edit,
cli._import,
cli._init,
cli._sort,
cli._update,
]
for command in commands:
if command.name == 'edit':
self.assert_cli(command, ['-k', 'date_original', 'not_exist'], state=1)
else:
self.assert_cli(command, ['not_exist'], state=1)
self.assert_cli(cli._clone, ['not_exist'], state=2)
def test_edit(self, monkeypatch):
bool_options = (
*self.logger_options,
)
arg_options = (
*self.filter_options,
)
def mockreturn(self, message):
return '03-12-2021 08:12:35'
monkeypatch.setattr(Input, 'text', mockreturn)
args = (
'--key',
'date_original',
'--overwrite',
str(self.src_path.joinpath('test_exif/photo.png')),
str(self.src_path),
)
self.assert_cli(cli._edit, args)
# self.assert_options(cli._edit, bool_options, arg_options, args)
# self.assert_all_options(cli._edit, bool_options, arg_options, args)
def test_sort(self):
bool_options = (
*self.logger_options,
# '--interactive',
'--dry-run',
'--album-from-folder',
'--remove-duplicates',
'--use-date-filename',
'--use-file-dates',
'--clean',
)
arg_options = (
*self.filter_options,
('--path-format', '{%Y}/{folder}/{name}.{ext}'),
)
paths = (str(self.src_path),)
self.assert_cli(cli._sort, paths)
self.assert_options(cli._sort, bool_options, arg_options, paths)
self.assert_all_options(cli._sort, bool_options, arg_options, paths)
def test_clone(self, tmp_path):
paths = (str(self.src_path), str(tmp_path))
self.assert_cli(cli._init, [str(self.src_path)])
self.assert_cli(cli._clone, ['--dry-run', *self.logger_options, *paths])
self.assert_cli(cli._clone, paths)
def assert_init(self):
self.assert_cli(cli._init, [*self.logger_options, str(self.src_path)])
def assert_update(self):
file_path = Path(ORDIGI_PATH, 'samples/test_exif/photo.cr2')
dest_path = self.src_path / 'photo_moved.cr2'
shutil.copyfile(file_path, dest_path)
self.assert_cli(cli._update, [*self.logger_options, str(self.src_path)])
self.assert_cli(cli._update, ['--checksum', str(self.src_path)])
def assert_check(self):
self.assert_cli(cli._check, [*self.logger_options, str(self.src_path)])
def assert_clean(self):
bool_options = (
*self.logger_options,
# '--interactive',
'--dry-run',
'--delete-excluded',
'--folders',
'--path-string',
'--remove-duplicates',
)
arg_options = (
*self.filter_options,
('--dedup-regex', r'\d{4}-\d{2}'),
)
paths = ('test_exif', str(self.src_path))
self.assert_cli(cli._clean, paths)
paths = (str(self.src_path),)
self.assert_cli(cli._clean, paths)
self.assert_options(cli._clean, bool_options, arg_options, paths)
self.assert_all_options(cli._clean, bool_options, arg_options, paths)
def test_init_update_check_clean(self):
self.assert_init()
self.assert_update()
self.assert_clean()
def test_import(self, tmp_path):
bool_options = (
*self.logger_options,
# '--interactive',
'--dry-run',
'--album-from-folder',
'--remove-duplicates',
'--use-date-filename',
'--use-file-dates',
'--copy',
)
arg_options = (
('--exclude', '.DS_Store'),
*self.filter_options,
('--path-format', '{%Y}/{folder}/{stem}.{ext}'),
)
paths = (str(self.src_path), str(tmp_path))
result = self.runner.invoke(cli._import, ['--copy', *paths])
assert result.exit_code == 0
self.assert_options(cli._import, bool_options, arg_options, paths)
self.assert_all_options(cli._import, bool_options, arg_options, paths)
def test_compare(self):
bool_options = (
*self.logger_options,
# '--interactive',
'--dry-run',
'--find-duplicates',
'--remove-duplicates',
)
arg_options = (
*self.filter_options,
# ('--similar-to', ''),
('--similarity', '65'),
)
paths = (str(self.src_path),)
# Workaround
self.assert_cli(cli._update, paths)
self.assert_cli(cli._compare, paths)
self.assert_options(cli._compare, bool_options, arg_options, paths)
def test_check(self):
self.assert_check()
def test_needsfiles(tmpdir):
assert tmpdir
def test_create_file(tmp_path):
directory = tmp_path / "sub"
directory.mkdir()
path = directory / "hello.txt"
path.write_text(CONTENT)
assert path.read_text() == CONTENT
assert len(list(tmp_path.iterdir())) == 1


@ -1,298 +0,0 @@
from datetime import datetime
import shutil
import sqlite3
from pathlib import Path
import re
import pytest
import inquirer
from ordigi import LOG
from ordigi import constants
from ordigi import utils
from ordigi.summary import Summary
from ordigi.collection import Collection, FPath, Paths
from ordigi.exiftool import ExifTool, ExifToolCaching, exiftool_is_running, terminate_exiftool
from ordigi.geolocation import GeoLocation
from ordigi.media import Media, ReadExif
from ordigi.request import Input
from .conftest import randomize_files, randomize_db
LOG.setLevel(10)
class TestFPath:
@pytest.fixture(autouse=True)
def setup_class(cls, sample_files_paths):
cls.src_path, cls.file_paths = sample_files_paths
cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME
def test_get_part(self, tmp_path):
"""
Test all parts
"""
fpath = FPath(self.path_format, 4)
# Item to search for:
items = fpath.get_items()
masks = [
'<album>',
'<basename>',
'<camera_make>',
'<camera_model>',
'<city>',
'<"custom">',
'<country>',
'<ext>',
'<folder>',
'<folders[1:3]>',
'<location>',
'<name>',
'<original_name>',
'<state>',
'<title>',
'<%Y-%m-%d>',
'<%Y-%m-%d_%H-%M-%S>',
'<%Y-%m-%b>'
]
for file_path in self.file_paths:
media = Media(file_path, self.src_path, use_date_filename=True,
use_file_dates=True)
subdirs = file_path.relative_to(self.src_path).parent
exif_tags = {}
for key in ('album', 'camera_make', 'camera_model', 'latitude',
'longitude', 'original_name', 'title'):
exif_tags[key] = media.tags_keys[key]
exif_data = ExifToolCaching(str(file_path)).asdict()
loc = GeoLocation()
media.get_metadata(self.src_path, loc)
for item, regex in items.items():
for mask in masks:
matched = re.search(regex, mask)
if matched:
part = fpath.get_part(item, mask[1:-1], media.metadata)
# check if part is correct
assert isinstance(part, str), file_path
if item == 'basename':
assert part == file_path.stem, file_path
elif item == 'date':
if part == '':
media.get_date_media()
assert datetime.strptime(part, mask[1:-1])
elif item == 'folder':
assert part == subdirs.name, file_path
elif item == 'folders':
assert part in str(subdirs)
elif item == 'ext':
assert part == file_path.suffix[1:], file_path
elif item == 'name':
expected_part = file_path.stem
for rx in utils.get_date_regex().values():
part = re.sub(rx, '', expected_part)
assert part == expected_part, file_path
elif item == 'custom':
assert part == mask[2:-2], file_path
elif item in ('city', 'country', 'location', 'state'):
pass
elif item in exif_tags.keys():
f = False
for key in exif_tags[item]:
if key in exif_data:
f = True
assert part == exif_data[key], file_path
break
if not f:
assert part == '', file_path
else:
assert part == '', file_path
def test_get_early_morning_photos_date(self):
date = datetime(2021, 10, 16, 2, 20, 40)
fpath = FPath(self.path_format, 4)
part = fpath.get_early_morning_photos_date(date, '%Y-%m-%d')
assert part == '2021-10-15'
part = fpath.get_early_morning_photos_date(date, '%Y%m%d-%H%M%S')
assert part == '20211016-022040'
class TestCollection:
@pytest.fixture(autouse=True)
def setup_class(cls, sample_files_paths):
cls.src_path, cls.file_paths = sample_files_paths
cls.path_format = constants.DEFAULT_PATH + '/' + constants.DEFAULT_NAME
def teardown_class(self):
terminate_exiftool()
assert not exiftool_is_running()
def assert_import(self, summary, nb):
# Summary is created and there is no errors
assert summary.errors == 0
assert summary.success_table.sum('import') == nb
def assert_sort(self, summary, nb):
# Summary is created and there is no errors
assert summary.errors == 0
assert summary.success_table.sum('sort') == nb
def test_sort_files(self, tmp_path):
cli_options = {
'album_from_folder': True, 'cache': False, 'path_format': self.path_format
}
collection = Collection(tmp_path, cli_options=cli_options)
loc = GeoLocation()
summary = collection.sort_files([self.src_path], loc, imp='copy')
self.assert_import(summary, 29)
summary = collection.check_files()
assert summary.success_table.sum('import') == 29
assert summary.success_table.sum('update') == 0
assert not summary.errors
# check if album value are set
filters = {
'exclude': None,
'extensions': None,
'glob': '**/*',
'max_deep': None,
}
paths = Paths(filters).get_files(tmp_path)
for file_path in paths:
if '.db' not in str(file_path):
for value in ReadExif(file_path).get_key_values('album'):
assert value not in ('', None)
collection = Collection(tmp_path, cli_options=cli_options)
# Try to change path format and sort files again
path_format = 'test_exif/<city>/<%Y>-<name>.%l<ext>'
summary = collection.sort_files([tmp_path], loc)
self.assert_sort(summary, 23)
shutil.copytree(tmp_path / 'test_exif', tmp_path / 'test_exif_copy')
collection.summary = Summary(tmp_path)
assert collection.summary.success_table.sum() == 0
summary = collection.update(loc)
assert summary.success_table.sum('update') == 2
assert summary.success_table.sum() == 2
assert not summary.errors
collection.summary = Summary(tmp_path)
summary = collection.update(loc)
assert summary.success_table.sum() == 0
assert not summary.errors
# test with populated dest dir
randomize_files(tmp_path)
summary = collection.check_files()
assert summary.errors
# test summary update
collection.summary = Summary(tmp_path)
summary = collection.update(loc)
assert summary.success_table.sum('sort') == 0
assert summary.success_table.sum('update')
assert not summary.errors
def test_sort_files_invalid_db(self, tmp_path):
collection = Collection(tmp_path, {'path_format': self.path_format})
loc = GeoLocation()
randomize_db(tmp_path)
with pytest.raises(sqlite3.DatabaseError) as e:
summary = collection.sort_files([self.src_path], loc, imp='copy')
def test_sort_file(self, tmp_path):
for imp in ('copy', 'move', False):
collection = Collection(tmp_path)
# copy mode
src_path = Path(self.src_path, 'test_exif', 'photo.png')
media = Media(src_path, self.src_path)
media.get_metadata(tmp_path)
name = 'photo_' + str(imp) + '.png'
media.metadata['file_path'] = name
dest_path = Path(tmp_path, name)
src_checksum = utils.checksum(src_path)
summary = collection.sort_file(
src_path, dest_path, media.metadata, imp=imp
)
assert not summary.errors
# Ensure files remain the same
if not imp:
assert collection._checkcomp(dest_path, src_checksum)
if imp == 'copy':
assert src_path.exists()
else:
assert not src_path.exists()
shutil.copyfile(dest_path, src_path)
def test_get_files(self):
filters = {
'exclude': {'**/*.dng',},
'extensions': None,
'glob': '**/*',
'max_deep': 1,
}
paths = Paths(filters)
paths = list(paths.get_files(self.src_path))
assert len(paths) == 9
assert Path(self.src_path, 'test_exif/photo.dng') not in paths
for path in paths:
assert isinstance(path, Path)
def test_sort_similar_images(self, tmp_path):
path = tmp_path / 'collection'
shutil.copytree(self.src_path, path)
collection = Collection(path)
loc = GeoLocation()
summary = collection.init(loc)
summary = collection.sort_similar_images(path, similarity=60)
# Summary is created and there is no errors
assert not summary.errors
def test_edit_date_metadata(self, tmp_path, monkeypatch):
path = tmp_path / 'collection'
shutil.copytree(self.src_path, path)
collection = Collection(path, {'cache': False})
def mockreturn(self, message):
return '03-12-2021 08:12:35'
monkeypatch.setattr(Input, 'text', mockreturn)
collection.edit_metadata({path}, {'date_original'}, overwrite=True)
# check if db value is set
file_path = 'test_exif/photo.rw2'
date = collection.db.sqlite.get_metadata(file_path, 'DateOriginal')
assert date == '2021-03-12 08:12:35'
# Check if exif value is set
path_file = path.joinpath(file_path)
date = ExifTool(path_file).asdict()['EXIF:DateTimeOriginal']
assert date == '2021-03-12 08:12:35'
def test_edit_location_metadata(self, tmp_path, monkeypatch):
path = tmp_path / 'collection'
shutil.copytree(self.src_path, path)
collection = Collection(path, {'cache': False})
loc = GeoLocation()
def mockreturn(self, message):
return 'lyon'
monkeypatch.setattr(Input, 'text', mockreturn)
collection.edit_metadata({path}, {'location'}, loc, True)
# check if db value is set
file_path = 'test_exif/photo.rw2'
location_id = collection.db.sqlite.get_metadata(file_path, 'LocationId')
location = collection.db.sqlite.get_location_data(location_id, 'Location')
assert location_id, location == 'Lyon'
# Check if exif value is set
path_file = path.joinpath(file_path)
latitude = ExifTool(path_file).asdict()['EXIF:GPSLatitude']
longitude = ExifTool(path_file).asdict()['EXIF:GPSLongitude']
assert latitude == 45.7578136999889
assert longitude == 4.83201140001667


@ -4,7 +4,7 @@ import shutil
import tempfile
from unittest import mock
from ordigi.config import Config
from ordigi import config
# Helpers
import random
@ -21,8 +21,7 @@ class TestConfig:
@pytest.fixture(scope="module")
def conf(self, conf_path):
config = Config(conf_path)
return config.load_config()
return config.load_config(conf_path)
def test_write(self, conf_path):
assert conf_path.is_file()
@ -32,34 +31,27 @@ class TestConfig:
Read files from config and return variables
"""
# test valid config file
assert conf['Path']['dirs_path'] == '%u<%Y-%m>/<city>|<city>-<%Y>/<folders[:1]>/<folder>'
assert conf['Path']['name'] == '<%Y-%m-%b-%H-%M-%S>-<basename>.%l<ext>'
assert conf['Path']['dirs_path'] == '%u{%Y-%m}/{city}|{city}-{%Y}/{folders[:1]}/{folder}'
assert conf['Path']['name'] == '{%Y-%m-%b-%H-%M-%S}-{basename}.%l{ext}'
assert conf['Path']['day_begins'] == '4'
assert conf['Geolocation']['geocoder'] == 'Nominatium'
def test_load_config_no_exist(self):
# test file not exist
config = Config()
config.conf_path = Path('filename')
assert config.load_config() == {}
conf = config.load_config('filename')
assert conf == {}
def test_load_config_invalid(self, conf_path):
# test invalid config
write_random_file(conf_path)
with pytest.raises(Exception) as e:
config = Config(conf_path)
config.load_config(conf_path)
assert e.typename == 'MissingSectionHeaderError'
# def test_get_path_definition(self, conf):
# """
# Get path definition from config
# """
# config = Config(conf=conf)
# path = config.get_path_definition()
# assert path == '%u<%Y-%m>/<city>|<city>-<%Y>/<folders[:1]>/<folder>/<%Y-%m-%b-%H-%M-%S>-<basename>.%l<ext>'
def test_get_path_definition(self, conf):
"""
Get path definition from config
"""
path = config.get_path_definition(conf)
assert path == '%u{%Y-%m}/{city}|{city}-{%Y}/{folders[:1]}/{folder}/{%Y-%m-%b-%H-%M-%S}-{basename}.%l{ext}'
def test_get_config_options(self, conf):
config = Config(conf=conf)
options = config.get_config_options()
assert isinstance(options, dict)
# assert isinstance(options['Path'], dict)


@ -1,130 +0,0 @@
from datetime import datetime
from pathlib import Path
import pytest
import shutil
import sqlite3
from ordigi.database import Sqlite
class TestSqlite:
@pytest.fixture(autouse=True)
def setup_class(cls, tmp_path):
cls.test='abs'
cls.sqlite = Sqlite(tmp_path)
row_data = {
'FilePath': 'file_path',
'Checksum': 'checksum',
'Album': 'album',
'Title': 'title',
'LocationId': 2,
'DateMedia': datetime(2012, 3, 27),
'DateOriginal': datetime(2013, 3, 27),
'DateCreated': 'date_created',
'DateModified': 'date_modified',
'FileModifyDate': 'file_modify_date',
'CameraMake': 'camera_make',
'CameraModel': 'camera_model',
'OriginalName':'original_name',
'SrcDir': 'src_dir',
'Subdirs': 'subdirs',
'Filename': 'filename'
}
location_data = {
'Latitude': 24.2,
'Longitude': 7.3,
'LatitudeRef': 'latitude_ref',
'LongitudeRef': 'longitude_ref',
'City': 'city',
'State': 'state',
'Country': 'country',
'Location': 'location'
}
cls.sqlite.upsert_metadata(row_data)
cls.sqlite.upsert_location(location_data)
# cls.sqlite.add_metadata_data('filename', 'ksinslsdosic', 'original_name', 'date_original', 'album', 1)
# cls.sqlite.add_location(24.2, 7.3, 'city', 'state', 'country', 'location')
yield
shutil.rmtree(tmp_path)
def test_init(self):
assert isinstance(self.sqlite.filename, Path)
assert isinstance(self.sqlite.con, sqlite3.Connection)
assert isinstance(self.sqlite.cur, sqlite3.Cursor)
def test_create_table(self):
assert self.sqlite.is_table('metadata')
assert self.sqlite.is_table('location')
def test_add_metadata_data(self):
result = tuple(self.sqlite.cur.execute("""select * from metadata where
rowid=1""").fetchone())
assert result == (
1,
'file_path',
'checksum',
'album',
'title',
2,
'2012-03-27 00:00:00',
'2013-03-27 00:00:00',
'date_created',
'date_modified',
'file_modify_date',
'camera_make',
'camera_model',
'original_name',
'src_dir',
'subdirs',
'filename'
)
def test_get_checksum(self):
assert not self.sqlite.get_checksum('invalid')
assert self.sqlite.get_checksum('file_path') == 'checksum'
def test_get_metadata(self):
assert not self.sqlite.get_metadata('invalid', 'DateOriginal')
assert self.sqlite.get_metadata('file_path', 'Album') == 'album'
def test_add_location(self):
result = tuple(self.sqlite.cur.execute("""select * from location where
rowid=1""").fetchone())
assert result == (
1,
24.2,
7.3,
'latitude_ref',
'longitude_ref',
'city',
'state',
'country',
'location',
)
@pytest.mark.skip('TODO')
def test_get_location_data(self, LocationId, data):
pass
@pytest.mark.skip('TODO')
def test_get_location(self, Latitude, Longitude, column):
pass
def test_get_location_nearby(self):
value = self.sqlite.get_location_nearby(24.2005, 7.3004, 'Location')
assert value == 'location'
@pytest.mark.skip('TODO')
def test_delete_row(self, table, id):
pass
@pytest.mark.skip('TODO')
def test_delete_all_rows(self, table):
pass

tests/test_dozo.py Normal file

@ -0,0 +1,19 @@
import pytest
CONTENT = "content"
class TestOrdigi:
@pytest.mark.skip()
def test__sort(self):
assert 0
def test_needsfiles(tmpdir):
assert tmpdir
def test_create_file(tmp_path):
d = tmp_path / "sub"
d.mkdir()
p = d / "hello.txt"
p.write_text(CONTENT)
assert p.read_text() == CONTENT
assert len(list(tmp_path.iterdir())) == 1

tests/test_filesystem.py Normal file

@ -0,0 +1,178 @@
# TODO to be removed later
from datetime import datetime
import os
import pytest
from pathlib import Path
import re
from sys import platform
from time import sleep
from .conftest import copy_sample_files
from ordigi import constants
from ordigi.database import Db
from ordigi.filesystem import FileSystem
from ordigi.media import Media
from ordigi.exiftool import ExifToolCaching, exiftool_is_running, terminate_exiftool
@pytest.mark.skip()
class TestDb:
pass
class TestFilesystem:
def setup_class(cls):
cls.src_paths, cls.file_paths = copy_sample_files()
cls.path_format = constants.default_path + '/' + constants.default_name
def teardown_class(self):
terminate_exiftool()
assert not exiftool_is_running()
def test_get_part(self, tmp_path):
"""
Test all parts
"""
# Item to search for:
filesystem = FileSystem()
items = filesystem.get_items()
masks = [
'{album}',
'{basename}',
'{camera_make}',
'{camera_model}',
'{city}',
'{"custom"}',
'{country}',
'{ext}',
'{folder}',
'{folders[1:3]}',
'{location}',
'{name}',
'{original_name}',
'{state}',
'{title}',
'{%Y-%m-%d}',
'{%Y-%m-%d_%H-%M-%S}',
'{%Y-%m-%b}'
]
subdirs = Path('a', 'b', 'c', 'd')
for file_path in self.file_paths:
media = Media(str(file_path))
exif_tags = {}
for key in ('album', 'camera_make', 'camera_model', 'latitude',
'longitude', 'original_name', 'title'):
exif_tags[key] = media.tags_keys[key]
exif_data = ExifToolCaching(str(file_path)).asdict()
metadata = media.get_metadata()
for item, regex in items.items():
for mask in masks:
matched = re.search(regex, mask)
if matched:
part = filesystem.get_part(item, mask[1:-1],
metadata, Db(tmp_path), subdirs)
# check if part is correct
assert isinstance(part, str), file_path
if item == 'basename':
assert part == file_path.stem, file_path
elif item == 'date':
assert datetime.strptime(part, mask[1:-1])
elif item == 'folder':
assert part == subdirs.name, file_path
elif item == 'folders':
if platform == "win32":
assert '\\' in part, file_path
else:
assert '/' in part, file_path
elif item == 'ext':
assert part == file_path.suffix[1:], file_path
elif item == 'name':
expected_part = file_path.stem
for i, rx in filesystem.match_date_from_string(expected_part):
part = re.sub(rx, '', expected_part)
assert part == expected_part, file_path
elif item == 'custom':
assert part == mask[2:-2], file_path
elif item in ('city', 'country', 'location', 'state'):
pass
elif item in exif_tags.keys():
f = False
for key in exif_tags[item]:
if key in exif_data:
f = True
assert part == exif_data[key], file_path
break
if not f:
assert part == '', file_path
else:
assert part == '', file_path
def test_get_date_taken(self):
filesystem = FileSystem()
for file_path in self.file_paths:
exif_data = ExifToolCaching(str(file_path)).asdict()
media = Media(str(file_path))
metadata = media.get_metadata()
date_taken = filesystem.get_date_taken(metadata)
date_filename = None
for tag in media.tags_keys['original_name']:
if tag in exif_data:
date_filename = filesystem.get_date_from_string(exif_data[tag])
break
if not date_filename:
date_filename = filesystem.get_date_from_string(file_path.name)
if media.metadata['date_original']:
assert date_taken == media.metadata['date_original']
elif date_filename:
assert date_taken == date_filename
elif media.metadata['date_created']:
assert date_taken == media.metadata['date_created']
elif media.metadata['date_modified']:
assert date_taken == media.metadata['date_modified']
def test_sort_files(self, tmp_path):
db = Db(tmp_path)
filesystem = FileSystem(path_format=self.path_format)
summary, has_errors = filesystem.sort_files([self.src_paths], tmp_path, db)
# Summary is created and there are no errors
assert summary, summary
assert not has_errors, has_errors
# TODO check if path follows path_format
# TODO make another class?
def test_sort_file(self, tmp_path):
for mode in 'copy', 'move':
filesystem = FileSystem(path_format=self.path_format, mode=mode)
# exercise the same checks in both copy and move modes
src_path = Path(self.src_paths, 'photo.png')
dest_path = Path(tmp_path, 'photo_copy.png')
src_checksum = filesystem.checksum(src_path)
result_copy = filesystem.sort_file(src_path, dest_path)
assert result_copy
# Ensure files remain the same
assert filesystem.checkcomp(dest_path, src_checksum)
if mode == 'copy':
assert src_path.exists()
else:
assert not src_path.exists()
# TODO check for conflicts
# TODO check date
# filesystem.sort_files
#- Sort similar images into a directory
# filesystem.sort_similar

View File

@ -1,25 +0,0 @@
from ordigi.utils import distance_between_two_points
from ordigi.geolocation import GeoLocation
import pytest
class TestGeoLocation:
def setup_class(cls):
cls.loc = GeoLocation()
def test_coordinates_by_name(self):
coordinates = self.loc.coordinates_by_name('Sunnyvale, CA')
latitude = coordinates['latitude']
longitude = coordinates['longitude']
distance = distance_between_two_points(latitude, longitude, 37.3745086, -122.0581602)
assert distance <= 3000
def test_place_name(self):
place_name = self.loc.place_name(lat=37.368, lon=-122.03)
assert place_name['city'] == 'Sunnyvale', place_name
# Invalid lat/lon
with pytest.warns(UserWarning):
place_name = self.loc.place_name(lat=999, lon=999)
assert place_name == {'default': None}, place_name

View File

@ -1,110 +1,64 @@
from datetime import datetime
import pytest
from pathlib import Path
import re
import shutil
import tempfile
import pytest
from .conftest import copy_sample_files
from ordigi import constants
from ordigi.media import Media
from ordigi.images import Images
from ordigi.exiftool import ExifTool, ExifToolCaching
from ordigi.utils import get_date_from_string
ORDIGI_PATH = Path(__file__).parent.parent
CACHING = True
class TestMedia:
class TestMetadata:
@pytest.fixture(autouse=True)
def setup_class(cls, sample_files_paths):
cls.src_path, cls.file_paths = sample_files_paths
cls.ignore_tags = (
'EXIF:CreateDate',
'File:FileModifyDate',
'File:FileAccessDate',
'EXIF:Make',
'Composite:LightValue'
)
def setup_class(cls):
cls.src_paths, cls.file_paths = copy_sample_files()
cls.ignore_tags = ('EXIF:CreateDate', 'File:FileModifyDate',
'File:FileAccessDate', 'EXIF:Make', 'Composite:LightValue')
def get_media(self):
for file_path in self.file_paths:
yield file_path, Media(
file_path,
self.src_path,
album_from_folder=True,
cache=False,
ignore_tags=self.ignore_tags,
)
self.exif_data = ExifTool(str(file_path)).asdict()
yield Media(str(file_path), self.ignore_tags)
def test_get_metadata(self, tmp_path):
for file_path, media in self.get_media():
# test get metadata from cache or exif
for root in self.src_path, tmp_path:
media.get_metadata(root)
assert isinstance(media.metadata, dict), media.metadata
# check that all tag keys are present
for tags_key, tags in media.tags_keys.items():
assert tags_key in media.metadata
for tag in tags:
for tag_regex in self.ignore_tags:
assert not re.match(tag_regex, tag)
# Check for valid type
for key, value in media.metadata.items():
if value or value == '':
if 'date' in key:
assert isinstance(value, datetime)
elif key in ('latitude', 'longitude'):
assert isinstance(value, float)
else:
assert isinstance(value, str)
def test_get_metadata(self):
for media in self.get_media():
result = media.get_metadata()
assert result
assert isinstance(media.metadata, dict), media.metadata
# check that all tag keys are present
for tags_key, tags in media.tags_keys.items():
assert tags_key in media.metadata
for tag in tags:
for tag_regex in self.ignore_tags:
assert not re.match(tag_regex, tag)
# Check for valid type
for key, value in media.metadata.items():
if value or value == '':
if 'date' in key:
assert isinstance(value, datetime)
elif key in ('latitude', 'longitude'):
assert isinstance(value, float)
else:
assert value is None
if key == 'album':
for album in media.get_key_values('album'):
if album is not None and album != '':
assert value == album
break
else:
assert value == file_path.parent.name
# Check if has_exif_data() is True if 'date_original' key is
# present, else check if it's false
has_exif_data = False
for tag in media.tags_keys['date_original']:
if tag in media.exif_metadata:
if media.get_date_format(media.exif_metadata[tag]):
has_exif_data = True
assert media.has_exif_data()
break
if not has_exif_data:
assert not media.has_exif_data()
def test_get_date_media(self):
for file_path in self.file_paths:
exif_data = ExifToolCaching(str(file_path)).asdict()
media = Media(
file_path, self.src_path, use_date_filename=True, use_file_dates=True
)
media.get_metadata(self.src_path)
date_media = media.get_date_media()
date_filename = None
for tag in media.tags_keys['original_name']:
if tag in exif_data:
date_filename, _, _ = get_date_from_string(exif_data[tag])
break
if not date_filename:
date_filename, _, _ = get_date_from_string(file_path.name)
if media.metadata['date_original']:
assert date_media == media.metadata['date_original']
elif date_filename:
assert date_media == date_filename
elif media.metadata['date_created']:
assert date_media == media.metadata['date_created']
elif media.metadata['date_modified']:
assert date_media == media.metadata['date_modified']
elif media.metadata['file_modify_date']:
assert date_media == media.metadata['file_modify_date']
assert isinstance(value, str)
else:
assert value is None
# Check if has_exif_data() is True if 'date_original' key is
# present, else check if it's false
has_exif_data = False
for tag in media.tags_keys['date_original']:
if tag in media.exif_metadata:
if media.get_date_format(media.exif_metadata[tag]):
has_exif_data = True
assert media.has_exif_data()
break
if not has_exif_data:
assert not media.has_exif_data()
# Will be changed to get_metadata
# check if metadata types are correct

612
todo.md Executable file
View File

@ -0,0 +1,612 @@
# NOW
# Media:
- rewrite set_date...
# Test:
- finish filesystem
- date_taken
- geolocation
move elodie to dozo
check for early morning photos: add test
add --folder-path option %Y-%d-%m/%city/%album
datetime.today().strftime('%Y-%m-%d')
add %filename
add edit_exif command?
Add update command
# enhancement
- accept Path in get_exiftool
- Use get_exiftool instead of get_metadata:
try to do it in get_date_taken...
media class:
- Add self.file_path
## Album form folder
- move to filesystem
# TODO implement album from folder here?
# folder = os.path.basename(os.path.dirname(source))
# album = self.metadata['album']
# if album_from_folder and (album is None or album == ''):
# album = folder
# Update
use pathlib instead of os.path
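Illustrative only, not project code — a few os.path idioms next to their pathlib equivalents:

import os.path
from pathlib import Path

p = '/photos/2021/img.jpg'
os.path.basename(p)              # 'img.jpg'
Path(p).name                     # 'img.jpg'
os.path.splitext(p)[1]           # '.jpg'
Path(p).suffix                   # '.jpg'
os.path.join('/photos', '2021')  # '/photos/2021'
str(Path('/photos') / '2021')    # '/photos/2021'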
Allow update in sort command in same dir if path is the dest dir
Enhancement: swap hash db key/value, for checking file integrity
https://github.com/JohannesBuchner/imagehash
https://github.com/cw-somil/Duplicate-Remover
https://leons.im/posts/a-python-implementation-of-simhash-algorithm/
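A minimal sketch of the perceptual-hash comparison the imagehash link above describes, assuming the imagehash and Pillow packages are installed (names and threshold are illustrative):

from PIL import Image
import imagehash

def images_look_similar(path_a, path_b, threshold=5):
    # average_hash is robust to resizing/recompression; subtracting two
    # hashes below gives their Hamming distance.
    hash_a = imagehash.average_hash(Image.open(path_a))
    hash_b = imagehash.average_hash(Image.open(path_b))
    return hash_a - hash_b <= threshold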
Visually check similar images
https://www.pluralsight.com/guides/importing-image-data-into-numpy-arrays
https://stackoverflow.com/questions/56056054/add-check-boxes-to-scrollable-image-in-python
https://wellsr.com/python/python-image-manipulation-with-pillow-library/
kitty gird image?
https://fr.wikibooks.org/wiki/PyQt/PyQt_versus_wxPython
https://docs.python.org/3/faq/gui.html
https://docs.opencv.org/3.4/d3/df2/tutorial_py_basic_ops.html
https://stackoverflow.com/questions/52727332/python-tkinter-create-checkbox-list-from-listbox
Image grid methods (a matplotlib sketch follows the links below):
matplot
https://gist.github.com/lebedov/7018889ba47668c64bcf96aee82caec0
Tkinter
https://python-forum.io/thread-22700.html
https://stackoverflow.com/questions/43326282/how-can-i-use-images-in-a-tkinter-grid
wxwidget
https://wxpython.org/Phoenix/docs/html/wx.lib.agw.thumbnailctrl.html
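For the matplotlib route, a self-contained sketch of a thumbnail grid for reviewing candidate duplicates (assumes matplotlib and Pillow; paths are placeholders):

import matplotlib.pyplot as plt
from PIL import Image

def show_grid(paths, cols=4):
    rows = (len(paths) + cols - 1) // cols
    fig, axes = plt.subplots(rows, cols, figsize=(3 * cols, 3 * rows), squeeze=False)
    for ax in axes.flat:
        ax.axis('off')  # also blanks any unused trailing cells
    for ax, path in zip(axes.flat, paths):
        ax.imshow(Image.open(path))
        ax.set_title(path, fontsize=8)
    plt.tight_layout()
    plt.show()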
Ability to change metadata on a selection
Enhancement: Option to keep existing directory structure
Fix: change version number to 0.x99
Fix: README
Refactoring: elodie update: update metadata of destination
Fix: update: fix move files...
Refactoring: Move exiftool config
Checksum:
FIX: test that checksums remain the same for all files (global check)
FIX: if dest file already exists and checksums don't match, change the name to
prevent overwriting a file with the same dest path (see the sketch below)
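A hedged sketch of that rename-on-conflict behaviour; checksum() stands in for the project's own checksum helper:

from pathlib import Path

def safe_dest(dest, src_checksum, checksum):
    dest = Path(dest)
    candidate = dest
    counter = 1
    while candidate.exists():
        if checksum(candidate) == src_checksum:
            return candidate  # identical content already in place
        # different content: try photo_1.jpg, photo_2.jpg, ...
        candidate = dest.with_name(f'{dest.stem}_{counter}{dest.suffix}')
        counter += 1
    return candidate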
Enhancement: media file: do not filter files, except to prevent errors when copying
fix: Valid file: check for open-file errors
Enhancement: Add %base_name string key
Refactoring: class get_metadata
check if has exif, check exif type...
Interface: show errors and warnings
Interface: less verbose when there are no errors
Interface: move default settings to config?
Behavior: Move only by default, without changing metadata and filename...
Refactoring: check only once that media is valid?
Refactoring: Unify source and path
Enhancement: allow nested dir
Fix: check exclusion for file
Refactoring: Import perl as submodule?
Enhancement: set up arguments to exiftool
https://github.com/andrewning/sortphotos/blob/master/src/sortphotos.py
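A sketch of driving the exiftool CLI with explicit arguments, independent of the project's wrapper (assumes exiftool is on PATH; -json and -n are standard exiftool flags):

import json
import subprocess

def read_tags(path):
    # -n asks exiftool for numeric, machine-readable values
    result = subprocess.run(['exiftool', '-json', '-n', str(path)],
                            capture_output=True, text=True, check=True)
    return json.loads(result.stdout)[0]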
# AFTER
Enhancement: add walklevel function
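A cleaned-up version of the walklevel helper experimented with in the session logs further below:

import os

def walklevel(some_dir, level=1):
    # Like os.walk, but stop descending `level` directories below some_dir.
    some_dir = some_dir.rstrip(os.path.sep)
    assert os.path.isdir(some_dir)
    num_sep = some_dir.count(os.path.sep)
    for root, dirs, files in os.walk(some_dir):
        yield root, dirs, files
        if root.count(os.path.sep) >= num_sep + level:
            del dirs[:]  # prune so os.walk does not descend further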
Enhancement: change early morning date sort
# TODO
Fix: date, make correction in filename if needed
Check: date from filename
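A hypothetical sketch of pulling a date out of a filename with a regex; the project's own get_date_from_string may behave differently:

import re
from datetime import datetime

def date_from_filename(name):
    match = re.search(r'(\d{4})[-_]?(\d{2})[-_]?(\d{2})', name)
    if not match:
        return None
    try:
        return datetime(*map(int, match.groups()))
    except ValueError:
        return None  # e.g. '2021-13-45' matches the regex but is not a date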
Options:
--update-cache|-u
--date-from-filename
--location --time
# --date from folder
# --date from file
# -f overwrite metadata
Add get tag function
Add --copy alternative
--auto|-a: a set of options: geolocation, best-match date, rename, album
from folder...
default: only move
# --keep-folder option
# --rename
-- no cache mode!!
--confirm unsure operation
--interactive
# TEST
# lat='45.58339'
# lon='4.79823'
# coordinates ='53.480837, -2.244914'
# Alger
# coords=(36.752887, 3.042048)
https://www.gitmemory.com/issue/pallets/click/843/634305917
https://github.com/pallets/click/issues/843
# import unittest
# import pytest
# from thing.__main__ import cli
# class TestCli(unittest.TestCase):
# @pytest.fixture(autouse=True)
# def capsys(self, capsys):
# self.capsys = capsys
# def test_cli(self):
# with pytest.raises(SystemExit) as ex:
# cli(["create", "--name", "test"])
# self.assertEqual(ex.value.code, 0)
# out, err = self.capsys.readouterr()
# self.assertEqual(out, "Succesfully created test\n")
# dev
# mode ~/.elodie ~/.config/elodie
# location selection buggy
# TODO:
# /home/cedric/src/elodie/elodie/media/photo.py(86)get_date_taken()
# 85 # TODO potential bug for old photos below 1970...
# ---> 86 if(seconds_since_epoch == 0):
# 87 return None
import os
def walklevel(some_dir, level=1):
some_dir = some_dir.rstrip(os.path.sep)
assert os.path.isdir(some_dir)
num_sep = some_dir.count(os.path.sep)
for root, dirs, files in os.walk(some_dir):
yield root, dirs, files
num_sep_this = root.count(os.path.sep)
if num_sep + level <= num_sep_this:
del dirs[:]
49/2: y=walklevel('/home/cedric', level=1)
49/3: next(y)
49/4: next(y)
49/5: next(y)
49/6: next(y)
49/7: next(y)
49/8: y=walklevel('/home/cedric', level=0)
49/9: next(y)
49/10: next(y)
49/11: y=walklevel('/home/cedric/.test/Nexcloud/', level=0)
49/12:
import os
def walklevel(some_dir, level=1):
some_dir = some_dir.rstrip(os.path.sep)
assert os.path.isdir(some_dir)
num_sep = some_dir.count(os.path.sep)
for root, dirs, files in os.walk(some_dir):
yield root, dirs, files
num_sep_this = root.count(os.path.sep)
if num_sep + level <= num_sep_this:
print dirs, files
49/13:
import os
def walklevel(some_dir, level=1):
some_dir = some_dir.rstrip(os.path.sep)
assert os.path.isdir(some_dir)
num_sep = some_dir.count(os.path.sep)
for root, dirs, files in os.walk(some_dir):
yield root, dirs, files
num_sep_this = root.count(os.path.sep)
if num_sep + level <= num_sep_this:
print(dirs, files)
49/14: y=walklevel('/home/cedric/.test/Nexcloud/', level=0)
49/15: next(y)
49/16: next(y)
49/17: y=walklevel('/home/cedric/.test/Nexcloud/', level=0)
49/18:
import os
def walklevel(some_dir, level=1):
some_dir = some_dir.rstrip(os.path.sep)
assert os.path.isdir(some_dir)
num_sep = some_dir.count(os.path.sep)
for root, dirs, files in os.walk(some_dir):
yield root, dirs, files
num_sep_this = root.count(os.path.sep)
49/19: y=walklevel('/home/cedric/.test/Nexcloud/', level=0)
49/20: next(y)
49/21: next(y)
49/22: y=walklevel('/home/cedric/.test/Nexcloud/', level=2)
49/23: next(y)
49/24: next(y)
49/25: y=walklevel('/home/cedric/.test/las canarias 2012/', level=2)
49/26: next(y)
49/27: next(y)
49/28: next(y)
49/29: next(y)
49/30: y=walklevel('/home/cedric/.test/las canarias 2012/', level=0)
49/31: next(y)
49/32: next(y)
49/33: next(y)
49/34:
import os
def walklevel(some_dir, level=1):
some_dir = some_dir.rstrip(os.path.sep)
assert os.path.isdir(some_dir)
num_sep = some_dir.count(os.path.sep)
for root, dirs, files in os.walk(some_dir):
yield root, dirs, files
num_sep_this = root.count(os.path.sep)
if num_sep + level <= num_sep_this:
print('fuck')
49/35: y=walklevel('/home/cedric/.test/las canarias 2012/', level=0)
49/36: next(y)
49/37: next(y)
49/38: next(y)
64/1: a=os.walk('/home/cedric/.test/las canarias 2012')
64/2: import os
64/3: a=os.walk('/home/cedric/.test/las canarias 2012')
64/4: next(a)
64/5: next(a)
64/6: os.path.sep
64/7: os.path.relpath('/home/cedric/.test/las canarias 2012/private', 'private')
64/8: os.path.relpath('/home/cedric/.test/las canarias 2012', 'private')
64/9: os.path.relpath('/home/cedric/.test/las canarias 2012/private', '/home/cedric/.test/las canarias 2012')
64/10: b='test'
64/11: a='private'
64/12: a+b
64/13: os.path.join(a,b,b)
64/14: !True
64/15: not True
64/16: a=TRue
64/17: a=True
64/18: not a
77/1:
import os
import requests
def get_location(geotags):
coords = get_coordinates(geotags)
uri = 'https://revgeocode.search.hereapi.com/v1/revgeocode'
headers = {}
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
response = requests.get(uri, headers=headers, params=params)
try:
response.raise_for_status()
return response.json()
except requests.exceptions.HTTPError as e:
print(str(e))
return {}
77/2: cd ~/.test/
77/3: ls
77/4: cd 2021-02-Feb/
77/5: ls
77/6: cd Villeurbanne/
77/7: ls
77/8: ls -l
77/9: exif = get_exif('2021-02-24_09-33-29-20210305_081001_01.mp4')
77/10:
from PIL import Image
def get_exif(filename):
image = Image.open(filename)
image.verify()
return image._getexif()
77/11: exif = get_exif('2021-02-24_09-33-29-20210305_081001_01.mp4')
77/12: ..
77/13: cd ..
77/14: ls
77/15: cd ..
77/16: ls
77/17: cd 2021-03-Mar/
77/18: cd Villeurbanne/
77/19: ls
77/20: exif = get_exif('2021-03-09_09-58-42-img_20210309_105842.jpg')
77/21: exif
77/22:
def get_geotagging(exif):
if not exif:
raise ValueError("No EXIF metadata found")
geotagging = {}
for (idx, tag) in TAGS.items():
if tag == 'GPSInfo':
if idx not in exif:
raise ValueError("No EXIF geotagging found")
for (key, val) in GPSTAGS.items():
if key in exif[idx]:
geotagging[val] = exif[idx][key]
return geotagging
77/23: get_geotagging(exif)
77/24: from PIL.ExifTags import TAGS
77/25:
def get_labeled_exif(exif):
labeled = {}
for (key, val) in exif.items():
labeled[TAGS.get(key)] = val
return labeled
77/26: get_geotagging(exif)
77/27: from PIL.ExifTags import GPSTAGS
77/28: get_geotagging(exif)
77/29: geotags = get_geotagging(exif)
77/30: get_location(geotags)
77/31:
def get_decimal_from_dms(dms, ref):
degrees = dms[0][0] / dms[0][1]
minutes = dms[1][0] / dms[1][1] / 60.0
seconds = dms[2][0] / dms[2][1] / 3600.0
if ref in ['S', 'W']:
degrees = -degrees
minutes = -minutes
seconds = -seconds
return round(degrees + minutes + seconds, 5)
def get_coordinates(geotags):
lat = get_decimal_from_dms(geotags['GPSLatitude'], geotags['GPSLatitudeRef'])
lon = get_decimal_from_dms(geotags['GPSLongitude'], geotags['GPSLongitudeRef'])
return (lat,lon)
77/32: get_geotagging(exif)
77/33: get_location(geotags)
77/34: from geopy.geocoders import Here
78/1: from geopy.geocoders import Here
78/3:
78/4: get_exif
78/5: ls
78/6: cd ~/.test
78/7: ls
78/8: cd 2021-03-Mar/
78/9: ls
78/10: cd Villeurbanne/
78/11: get_exif('2021-03-04_11-50-32-img_20210304_125032.jpg')
78/12: exif=get_exif('2021-03-04_11-50-32-img_20210304_125032.jpg')
78/13: get_geotagging(exif)
78/14:
from PIL.ExifTags import GPSTAGS
def get_geotagging(exif):
if not exif:
raise ValueError("No EXIF metadata found")
geotagging = {}
for (idx, tag) in TAGS.items():
if tag == 'GPSInfo':
if idx not in exif:
raise ValueError("No EXIF geotagging found")
for (key, val) in GPSTAGS.items():
if key in exif[idx]:
geotagging[val] = exif[idx][key]
return geotagging
78/15: geotags = get_geotagging(exif)
78/17: geotags = get_geotagging(exif)
78/18: get_coordinates(geotags)
78/19:
78/23: get_location(geotags)
78/24:
78/25: get_location(geotags)
78/26:
def get_decimal_from_dms(dms, ref):
degrees = dms[0][0] / dms[0][1]
minutes = dms[1][0] / dms[1][1] / 60.0
seconds = dms[2][0] / dms[2][1] / 3600.0
if ref in ['S', 'W']:
degrees = -degrees
minutes = -minutes
seconds = -seconds
return round(degrees + minutes + seconds, 5)
78/27: get_location(geotags)
78/28:
def get_decimal_from_dms(dms, ref):
degrees = dms[0]
minutes = dms[1] / 60.0
seconds = dms[2] / 3600.0
if ref in ['S', 'W']:
degrees = -degrees
minutes = -minutes
seconds = -seconds
return round(degrees + minutes + seconds, 5)
78/29: get_location(geotags)
78/30: exif
78/31: get_geotagging(exif)
78/32: geotags = get_geotagging(exif)
78/33: get_coordinates(geotags)
78/34: geotags = get_geotagging(exif)
78/35: get_location(geotags)
78/36: get_coordinates(geotags)
78/37: coords = get_coordinates(geotags)
78/38: coords
78/39: uri = 'https://revgeocode.search.hereapi.com/v1/revgeocode'
78/40:
headers = {}
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
78/41: headers = {}
78/42:
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
78/43:
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
78/44: API_KEY=m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe
78/45: API_KEY='m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe'
78/46:
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
78/47: API_KEY='m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe'
78/48:
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
78/49:
params = {
'apiKey': os.environ['m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
78/50: %load_ext autotime
78/51:
import pandas as pd
import geopandas as gpd
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
import matplotlib.pyplot as plt
import plotly_express as px
import tqdm
from tqdm._tqdm_notebook import tqdm_notebook
78/52:
import pandas as pd
import geopandas as gpd
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
import matplotlib.pyplot as plt
import plotly_express as px
import pandas as pd
import geopandas as gpd
from PIL import Image
filename='2021-02-24_09-33-29-20210305_081001_01.mp4'
def get_exif(filename):
image = Image.open(filename)
image.verify()
return image._getexif()
exif=get_exif(filename)
from PIL.ExifTags import TAGS
from PIL.ExifTags import GPSTAGS
def get_geotagging(exif):
if not exif:
raise ValueError("No EXIF metadata found")
geotagging = {}
for (idx, tag) in TAGS.items():
if tag == 'GPSInfo':
if idx not in exif:
raise ValueError("No EXIF geotagging found")
for (key, val) in GPSTAGS.items():
if key in exif[idx]:
geotagging[val] = exif[idx][key]
return geotagging
geotags = get_geotagging(exif)
import os
import requests
def get_location(geotags):
coords = get_coordinates(geotags)
uri = 'https://revgeocode.search.hereapi.com/v1/revgeocode'
headers = {}
params = {
'apiKey': os.environ['API_KEY'],
'at': "%s,%s" % coords,
'lang': 'en-US',
'limit': 1,
}
response = requests.get(uri, headers=headers, params=params)
try:
response.raise_for_status()
return response.json()
except requests.exceptions.HTTPError as e:
print(str(e))
return {}
def get_coordinates(geotags):
lat = get_decimal_from_dms(geotags['GPSLatitude'], geotags['GPSLatitudeRef'])
lon = get_decimal_from_dms(geotags['GPSLongitude'], geotags['GPSLongitudeRef'])
return (lat,lon)
coords = get_coordinates(geotags)
import geopy
from geopy.geocoders import Nominatim
locator = Nominatim(user_agent='myGeocoder')
# coordinates ='53.480837, -2.244914'
lat='45.58339'
lon='4.79823'
coords = lat + ',' + lon
locator.reverse(coords)
location =locator.reverse(coords)
location.address.split(',')
city=location.address.split(',')[1].strip()
country=location.address.split(',')[7].strip()
location.raw
country=location.raw['address']['country']
city=location.raw['address']['village']