"""
General file system methods.

.. moduleauthor:: Jaisen Mathai <jaisen@jmathai.com>
"""
|
2016-03-12 20:09:28 +01:00
|
|
|
from __future__ import print_function
|
|
|
|
from builtins import object
|
2016-01-08 23:49:06 +01:00
|
|
|
|
2015-10-02 09:20:27 +02:00
|
|
|
import os
|
2015-10-06 10:28:00 +02:00
|
|
|
import re
|
2015-10-14 05:26:55 +02:00
|
|
|
import shutil
|
2015-10-02 09:20:27 +02:00
|
|
|
import time
|
|
|
|
|
2017-01-22 06:21:03 +01:00
|
|
|
from elodie import compatability
|
2015-10-08 11:22:30 +02:00
|
|
|
from elodie import geolocation
|
2016-11-09 07:41:00 +01:00
|
|
|
from elodie import log
|
2017-01-03 05:58:52 +01:00
|
|
|
from elodie.config import load_config
|
2015-10-14 05:26:55 +02:00
|
|
|
from elodie.localstorage import Db
|
2016-12-20 06:12:36 +01:00
|
|
|
from elodie.media.base import Base, get_all_subclasses
|
2015-10-08 11:22:30 +02:00
|
|
|
|
2016-01-02 08:23:06 +01:00
|
|
|
|
2016-01-08 23:49:06 +01:00
|
|
|
class FileSystem(object):

    """A class for interacting with the file system."""

    def __init__(self):
        # The default folder path is along the lines of 2015-01-Jan/Chicago
        self.default_folder_path_definition = {
            'date': '%Y-%m-%b',
            'location': '%city',
            'full_path': '%date/%album|%location|"{}"'.format(
                geolocation.__DEFAULT_LOCATION__
            ),
        }
        # Memoized return value of get_folder_path_definition().
        self.cached_folder_path_definition = None
        # Path parts that are valid even without a config entry.
        self.default_parts = ['album', 'city', 'state', 'country']

    def create_directory(self, directory_path):
        """Create a directory if it does not already exist.

        :param str directory_path: A fully qualified path of the directory
            to create.
        :returns: bool -- True if the directory exists or was created.
        """
        try:
            if os.path.exists(directory_path):
                return True
            else:
                os.makedirs(directory_path)
                return True
        except OSError:
            # OSError is thrown for cases like no permission
            pass

        return False

    def delete_directory_if_empty(self, directory_path):
        """Delete a directory only if it's empty.

        Instead of checking first using `len([name for name in
        os.listdir(directory_path)]) == 0`, we catch the OSError exception.

        :param str directory_path: A fully qualified path of the directory
            to delete.
        :returns: bool -- True if the directory was removed.
        """
        try:
            os.rmdir(directory_path)
            return True
        except OSError:
            # Raised when the directory is not empty, does not exist,
            # or we lack permission. All of these mean "not deleted".
            pass

        return False

    def get_all_files(self, path, extensions=None):
        """Recursively get all files which match a path and extension.

        :param str path: Path to start recursive file listing.
        :param tuple(str) extensions: File extensions to include (whitelist).
        :returns: generator
        """
        # If extensions is None then we get all supported extensions
        if not extensions:
            extensions = set()
            subclasses = get_all_subclasses(Base)
            for cls in subclasses:
                extensions.update(cls.extensions)

        for dirname, dirnames, filenames in os.walk(path):
            for filename in filenames:
                # If file extension is in `extensions` then yield the path
                if os.path.splitext(filename)[1][1:].lower() in extensions:
                    yield os.path.join(dirname, filename)

    def get_current_directory(self):
        """Get the current working directory.

        :returns: str
        """
        return os.getcwd()

    def get_file_name(self, media):
        """Generate file name for a photo or video using its metadata.

        We use an ISO8601-like format for the file name prefix. Instead of
        colons as the separator for hours, minutes and seconds we use a hyphen.
        https://en.wikipedia.org/wiki/ISO_8601#General_principles

        :param media: A Photo or Video instance
        :type media: :class:`~elodie.media.photo.Photo` or
            :class:`~elodie.media.video.Video`
        :returns: str or None for non-photo or non-videos
        """
        if not media.is_valid():
            return None

        metadata = media.get_metadata()
        if metadata is None:
            return None

        # First we check if we have metadata['original_name'].
        # We have to do this for backwards compatibility because
        # we originally did not store this back into EXIF.
        if metadata.get('original_name'):
            base_name = os.path.splitext(metadata['original_name'])[0]
        else:
            # If the file has EXIF title we use that in the file name
            # (i.e. my-favorite-photo-img_1234.jpg)
            # We want to remove the date prefix we add to the name.
            # This helps when re-running the program on files which were
            # already processed.
            base_name = re.sub(
                r'^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}-',
                '',
                metadata['base_name']
            )
            if len(base_name) == 0:
                base_name = metadata['base_name']

        # metadata.get('title') is falsy for a missing key, None or ''.
        if metadata.get('title'):
            title_sanitized = re.sub(r'\W+', '-', metadata['title'].strip())
            # Strip any pre-existing copy of the title before re-appending
            # so repeated runs do not duplicate it.
            base_name = base_name.replace('-%s' % title_sanitized, '')
            base_name = '%s-%s' % (base_name, title_sanitized)

        file_name = '%s-%s.%s' % (
            time.strftime(
                '%Y-%m-%d_%H-%M-%S',
                metadata['date_taken']
            ),
            base_name,
            metadata['extension'])
        return file_name.lower()

    def get_folder_path_definition(self):
        """Returns a list of folder definitions.

        Each element in the list represents a folder.
        Fallback folders are supported and are nested lists.
        Return values take the following form.
        [
            ('date', '%Y-%m-%d'),
            [
                ('location', '%city'),
                ('album', ''),
                ('"Unknown Location", '')
            ]
        ]

        :returns: list
        """
        # If we've done this already then return it immediately without
        # incurring any extra work
        if self.cached_folder_path_definition is not None:
            return self.cached_folder_path_definition

        config = load_config()

        # If Directory is in the config we assume full_path and its
        # corresponding values (date, location) are also present
        config_directory = self.default_folder_path_definition
        if 'Directory' in config:
            config_directory = config['Directory']

        # Find all subpatterns of full_path that map to directories.
        #  I.e. %foo/%bar => ['foo', 'bar']
        #  I.e. %foo/%bar|%example|"something" => ['foo', 'bar|example|"something"']
        path_parts = re.findall(
            r'(%[^/]+)',
            config_directory['full_path']
        )

        if not path_parts:
            return self.default_folder_path_definition

        self.cached_folder_path_definition = []
        for part in path_parts:
            # NOTE(review): `part` still carries its leading '%' here, so the
            # first two branches rarely match; the else branch produces the
            # same tuples for simple parts, so behavior is preserved.
            if part in config_directory:
                part = part[1:]
                self.cached_folder_path_definition.append(
                    [(part, config_directory[part])]
                )
            elif part in self.default_parts:
                part = part[1:]
                self.cached_folder_path_definition.append(
                    [(part, '')]
                )
            else:
                this_part = []
                for p in part.split('|'):
                    # Strip the leading '%' from each fallback component.
                    p = p[1:]
                    this_part.append(
                        (p, config_directory[p] if p in config_directory else '')
                    )
                self.cached_folder_path_definition.append(this_part)

        return self.cached_folder_path_definition

    def get_folder_path(self, metadata):
        """Given a media's metadata this function returns the folder path as a string.

        :param dict metadata: Metadata dictionary.
        :returns: str
        """
        path_parts = self.get_folder_path_definition()
        path = []
        for path_part in path_parts:
            # We support fallback values so that
            # %album|%city|"Unknown Location" results in
            #   My Album - when an album exists
            #   Sunnyvale - when no album exists but a city exists
            #   Unknown Location - when neither an album nor location exist
            for this_part in path_part:
                part, mask = this_part
                if part in ('date', 'day', 'month', 'year'):
                    path.append(
                        time.strftime(mask, metadata['date_taken'])
                    )
                    break
                elif part in ('location', 'city', 'state', 'country'):
                    place_name = geolocation.place_name(
                        metadata['latitude'],
                        metadata['longitude']
                    )

                    location_parts = re.findall(r'(%[^%]+)', mask)
                    parsed_folder_name = self.parse_mask_for_location(
                        mask,
                        location_parts,
                        place_name,
                    )
                    path.append(parsed_folder_name)
                    break
                # BUGFIX: was `part in ('album')` which is substring
                # membership in the *string* 'album' (e.g. 'a' matched);
                # test for equality instead.
                elif part == 'album':
                    if metadata['album']:
                        path.append(metadata['album'])
                        break
                elif part.startswith('"') and part.endswith('"'):
                    path.append(part[1:-1])
                    # BUGFIX: without this break a quoted literal followed
                    # by further fallbacks could append twice.
                    break

        return os.path.join(*path)

    def parse_mask_for_location(self, mask, location_parts, place_name):
        """Takes a mask for a location and interpolates the actual place names.

        Given these parameters here are the outputs.

        mask=%city
        location_parts=['%city']
        place_name={'city': u'Sunnyvale'}
        output=Sunnyvale

        mask=%city-%state
        location_parts=['%city-', '%state']
        place_name={'city': u'Sunnyvale', 'state': u'California'}
        output=Sunnyvale-California

        mask=%country
        location_parts=['%country']
        place_name={'default': u'Sunnyvale', 'city': u'Sunnyvale'}
        output=Sunnyvale

        :param str mask: The location mask in the form of %city-%state, etc
        :param list location_parts: A list of strings, one per %-component
            of the mask, e.g. ['%city-', '%state']
        :param dict place_name: A dictionary of place keywords and names like
            {'default': u'California', 'state': u'California'}
        :returns: str
        """
        found = False
        folder_name = mask
        for loc_part in location_parts:
            # We assume the search returns a tuple of length 3.
            # If not then it's a bad mask in config.ini.
            # loc_part = '%country-random'
            # component_full = '%country-random'
            # component = '%country'
            # key = 'country'
            component_full, component, key = re.search(
                r'((%([a-z]+))[^%]*)',
                loc_part
            ).groups()

            if key in place_name:
                found = True
                replace_target = component
                replace_with = place_name[key]
            else:
                # Unknown key: drop the whole component including any
                # trailing separator characters.
                replace_target = component_full
                replace_with = ''

            folder_name = folder_name.replace(
                replace_target,
                replace_with,
            )

        if not found and folder_name == '':
            folder_name = place_name['default']

        return folder_name

    def process_file(self, _file, destination, media, **kwargs):
        """Import a single file: compute its destination and copy or move it.

        :param str _file: Path of the source file.
        :param str destination: Root of the destination directory tree.
        :param media: A Photo or Video instance for _file.
        :param kwargs: ``move`` (bool) to move instead of copy;
            ``allowDuplicate`` (bool) to import even if the checksum
            is already known.
        :returns: str destination path, or None if the file was skipped.
        """
        move = kwargs.get('move', False)
        allow_duplicate = kwargs.get('allowDuplicate', False)

        if not media.is_valid():
            print('%s is not a valid media file. Skipping...' % _file)
            return

        # Persist the original file name into the media's metadata before
        # we rename it below.
        media.set_original_name()
        metadata = media.get_metadata()

        directory_name = self.get_folder_path(metadata)

        dest_directory = os.path.join(destination, directory_name)
        file_name = self.get_file_name(media)
        dest_path = os.path.join(dest_directory, file_name)

        db = Db()
        checksum = db.checksum(_file)
        if checksum is None:
            log.info('Could not get checksum for %s. Skipping...' % _file)
            return

        # If duplicates are not allowed then we check if we've seen this file
        # before via checksum. We also check that the file exists at the
        # location we believe it to be.
        # If we find a checksum match but the file doesn't exist where we
        # believe it to be then we write a debug log and proceed to import.
        checksum_file = db.get_hash(checksum)
        if allow_duplicate is False and checksum_file is not None:
            if os.path.isfile(checksum_file):
                log.info('%s already exists at %s. Skipping...' % (
                    _file,
                    checksum_file
                ))
                return
            else:
                log.info('%s matched checksum but file not found at %s. Importing again...' % (  # noqa
                    _file,
                    checksum_file
                ))

        self.create_directory(dest_directory)

        if move is True:
            # Preserve the source file's access/modification times across
            # the move.
            stat = os.stat(_file)
            shutil.move(_file, dest_path)
            os.utime(dest_path, (stat.st_atime, stat.st_mtime))
        else:
            compatability._copyfile(_file, dest_path)
            self.set_utime(media)

        db.add_hash(checksum, dest_path)
        db.update_hash_db()

        return dest_path

    def set_utime(self, media):
        """Set the modification time on the file based on the file name.

        If the base name carries a YYYY-MM-DD_HH-MM-SS prefix, that time
        overrides the metadata's date_taken.
        """
        # Initialize date taken to what's returned from the metadata function.
        # If the folder and file name follow a time format of
        # YYYY-MM-DD_HH-MM-SS-IMG_0001.JPG then we override the date_taken
        file_path = media.get_file_path()
        metadata = media.get_metadata()
        date_taken = metadata['date_taken']
        base_name = metadata['base_name']
        year_month_day_match = re.search(
            r'^(\d{4})-(\d{2})-(\d{2})_(\d{2})-(\d{2})-(\d{2})',
            base_name
        )
        if year_month_day_match is not None:
            (year, month, day, hour, minute, second) = year_month_day_match.groups()  # noqa
            date_taken = time.strptime(
                '{}-{}-{} {}:{}:{}'.format(year, month, day, hour, minute, second),  # noqa
                '%Y-%m-%d %H:%M:%S'
            )

        # We don't make any assumptions about time zones and
        # assume local time zone. (Previously this call was duplicated in
        # both branches; it is identical either way.)
        os.utime(file_path, (time.time(), time.mktime(date_taken)))