Commit 6a0d040
improve album links (and fix broken test credentials)
Make album file link names unique
Handle the album folder name changing when the album's dates change
gilesknap committed Feb 25, 2019
1 parent 1cbda53 commit 6a0d040
Showing 10 changed files with 144 additions and 63 deletions.
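In outline, the commit does two things to the album links under albums/: each link name now carries a zero-padded per-album sequence prefix so that two album entries with the same base filename cannot collide, and the album folder itself is renamed (and the stale folder removed) when a re-index changes the album's end date. A minimal sketch of the naming scheme with hypothetical file names (the real code resets its album_item counter per album while walking the local database):

    photos = ['IMG_0001.jpg', 'IMG_0001.jpg', 'IMG_0002.jpg']  # duplicate base names
    links = ['{:04d}_{}'.format(item, name) for item, name in enumerate(photos)]
    print(links)
    # ['0000_IMG_0001.jpg', '0001_IMG_0001.jpg', '0002_IMG_0002.jpg']
    # the counter keeps every symlink name inside an album folder unique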
10 changes: 5 additions & 5 deletions gphotos/GoogleAlbumsRow.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 # coding: utf8
 from typing import TypeVar
-from pathlib import Path
 from datetime import datetime
 from gphotos import Utils
 from gphotos.DbRow import DbRow
@@ -24,15 +23,15 @@ class GoogleAlbumsRow(DbRow):
     table = "Albums"
     cols_def = {'RemoteId': str, 'AlbumName': str, 'Size': int,
                 'StartDate': datetime,
-                'EndDate': datetime, 'SyncDate': datetime}
+                'EndDate': datetime, 'SyncDate': datetime,
+                'Downloaded': bool}

     # todo - overloading GoogleAlbumsRow as a Database Row does not really work
     def to_media(self) -> DatabaseMedia:
         db_media = DatabaseMedia(
             _id=self.RemoteId,
             _filename=self.AlbumName,
             _size=self.Size,
-            _create_date=self.StartDate)
+            _create_date=self.EndDate)
         return db_media

     @classmethod
@@ -48,5 +47,6 @@ def from_parm(cls, album_id, filename, size, start, end) -> G:
             StartDate=start,
             EndDate=end,
             SyncDate=Utils.date_to_string(
-                datetime.now()))
+                datetime.now()),
+            Downloaded=0)
         return new_row
83 changes: 52 additions & 31 deletions gphotos/GoogleAlbumsSync.py
@@ -30,6 +30,7 @@ def __init__(self, api: RestClient, root_folder: Path, db: LocalData,
             db: local database for indexing
         """
         self._root_folder: Path = root_folder
+        self._links_root = self._root_folder / 'albums'
         self._db: LocalData = db
         self._api: RestClient = api
         self.flush = flush
@@ -59,7 +60,6 @@ def fetch_album_contents(self, album_id: str) -> (datetime, datetime):
                 media_item = GooglePhotosMedia(media_item_json)
                 log.debug('----%s', media_item.filename)
                 self._db.put_album_file(album_id, media_item.id)
-                self._db.put_album_file(album_id, media_item.id)
                 last_date = max(media_item.create_date, last_date)
                 first_date = min(media_item.create_date, first_date)
             next_page = items_json.get('nextPageToken')
@@ -107,6 +107,16 @@ def index_album_media(self):
                     first_date, last_date)
                 self._db.put_row(gar, update=indexed_album)

+                # re-indexing means the local links are out of date: remove
+                # links in preparation for create_album_content_links
+                if indexed_album:
+                    old_album_folder = self.album_folder_name(
+                        indexed_album.filename, indexed_album.create_date)
+                    if old_album_folder.exists():
+                        log.debug('removing previous album folder %s',
+                                  old_album_folder)
+                        shutil.rmtree(old_album_folder)
+
             next_page = results.get('nextPageToken')
             if next_page:
                 response = self._api.albums.list.execute(pageSize=50,
@@ -115,42 +125,53 @@ def index_album_media(self):
                 break
         log.warning('Indexed %d Albums', count)

+    def album_folder_name(self, album_name: str, end_date: datetime) -> Path:
+        year = Utils.safe_str_time(end_date, '%Y')
+        month = Utils.safe_str_time(end_date, '%m%d')
+
+        rel_path = u"{0} {1}".format(month, album_name)
+        link_folder: Path = self._links_root / year / rel_path
+        return link_folder
+
     def create_album_content_links(self):
         log.warning("Creating album folder links to media ...")
         count = 0
-        links_root = self._root_folder / 'albums'
-        if links_root.exists() and self.flush:
+        album_item = 0
+        current_rid = ''
+        if self._links_root.exists() and self.flush:
             log.debug('removing previous album links tree')
-            shutil.rmtree(links_root)
-
-        for (path, file_name, album_name, end_date) in \
-                self._db.get_album_files():
+            shutil.rmtree(self._links_root)
+        re_download = not self._links_root.exists()
+
+        for (path, file_name, album_name, end_date_str, rid) in \
+                self._db.get_album_files(download_again=re_download):
+            if current_rid == rid:
+                album_item += 1
+            else:
+                self._db.put_album_downloaded(rid)
+                current_rid = rid
+                album_item = 0
+            end_date = Utils.string_to_date(end_date_str)
             full_file_name = self._root_folder / path / file_name

-            year = Utils.safe_str_time(Utils.string_to_date(end_date), '%Y')
-            month = Utils.safe_str_time(Utils.string_to_date(end_date), '%m%d')
-
-            rel_path = u"{0} {1}".format(month, album_name)
-            link_folder: Path = links_root / year / rel_path
-            link_file = link_folder / file_name
-            if link_file.exists():
-                log.debug('album link exists: %s', link_file)
-            else:
-                # incredibly, pathlib.Path.relative_to cannot handle
-                # '../' in a relative path !!! reverting to os.path for this.
-                relative_filename = os.path.relpath(full_file_name,
-                                                    str(link_folder))
-                log.debug('adding album link %s -> %s', relative_filename,
-                          link_file)
-                try:
-                    if not link_folder.is_dir():
-                        log.debug('new album folder %s', link_folder)
-                        link_folder.mkdir(parents=True)
-
-                    link_file.symlink_to(relative_filename)
-                    count += 1
-                except FileExistsError:
-                    log.error('bad link to %s', full_file_name)
+            link_folder: Path = self.album_folder_name(album_name, end_date)
+
+            link_file = link_folder / "{:04d}_{}".format(album_item, file_name)
+            # incredibly, pathlib.Path.relative_to cannot handle
+            # '../' in a relative path !!! reverting to os.path
+            relative_filename = os.path.relpath(full_file_name,
+                                                str(link_folder))
+            log.debug('adding album link %s -> %s', relative_filename,
+                      link_file)
+            try:
+                if not link_folder.is_dir():
+                    log.debug('new album folder %s', link_folder)
+                    link_folder.mkdir(parents=True)
+
+                link_file.symlink_to(relative_filename)
+                count += 1
+            except FileExistsError:
+                log.error('bad link to %s', full_file_name)

         log.warning("Created %d new album folder links", count)
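For illustration, the new album_folder_name() derives the link folder from the album's end date, which is why a changed end date moves the whole folder. A standalone sketch under stated assumptions: plain datetime.strftime stands in for Utils.safe_str_time, 'photos/albums' stands in for the real links root, and the 'Clones' album from the system test below is the example:

    from datetime import datetime
    from pathlib import Path

    def album_folder_name(links_root: Path, album_name: str, end_date: datetime) -> Path:
        # mirrors the layout above: <links_root>/<year>/<MMDD> <album name>
        return links_root / end_date.strftime('%Y') / '{} {}'.format(
            end_date.strftime('%m%d'), album_name)

    print(album_folder_name(Path('photos/albums'), 'Clones', datetime(2017, 9, 23)))
    # photos/albums/2017/0923 Clones
    # if a later index moves the album's end date to 2017-09-20, the folder becomes
    # photos/albums/2017/0920 Clones and index_album_media() deletes the stale one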

35 changes: 26 additions & 9 deletions gphotos/LocalData.py
@@ -25,7 +25,7 @@
 class LocalData:
     DB_FILE_NAME: str = 'gphotos.sqlite'
     BLOCK_SIZE: int = 10000
-    VERSION: float = 5.4
+    VERSION: float = 5.5

     def __init__(self, root_folder: Path, flush_index: bool = False):
         """ Initialize a connection to the DB and create some cursors.
@@ -268,7 +268,12 @@ def get_album(self, album_id: str) -> DatabaseMedia:
         res = self.cur.fetchone()
         return GoogleAlbumsRow(res).to_media()

-    def get_album_files(self, album_id: str = '%'
+    def put_album_downloaded(self, album_id: str, downloaded: bool = True):
+        self.cur.execute(
+            "UPDATE Albums SET Downloaded=? "
+            "WHERE RemoteId IS ?;", (downloaded, album_id))
+
+    def get_album_files(self, album_id: str = '%', download_again: bool = False
                         ) -> (str, str, str, str):
         """ Join the Albums, SyncFiles and AlbumFiles tables to get a list
         of the files in an album or all albums.
@@ -279,13 +284,19 @@ def get_album_files(self, album_id: str = '%'
             A tuple containing:
                 Path, Filename, AlbumName, Album end date
         """
-        self.cur.execute(
-            "SELECT SyncFiles.Path, SyncFiles.Filename, Albums.AlbumName, "
-            "Albums.EndDate FROM AlbumFiles "
-            "INNER JOIN SyncFiles ON AlbumFiles.DriveRec=SyncFiles.RemoteId "
-            "INNER JOIN Albums ON AlbumFiles.AlbumRec=Albums.RemoteId "
-            "WHERE Albums.RemoteId LIKE ?;",
-            (album_id,))
+
+        extra_clauses = '' if download_again else 'AND Albums.Downloaded==0'
+
+        query = """
+            SELECT SyncFiles.Path, SyncFiles.Filename, Albums.AlbumName,
+            Albums.EndDate, Albums.RemoteId FROM AlbumFiles
+            INNER JOIN SyncFiles ON AlbumFiles.DriveRec=SyncFiles.RemoteId
+            INNER JOIN Albums ON AlbumFiles.AlbumRec=Albums.RemoteId
+            WHERE Albums.RemoteId LIKE ?
+            {}
+            ORDER BY AlbumName, SyncFiles.CreateDate;""".format(extra_clauses)
+
+        self.cur.execute(query, (album_id,))
         results = self.cur.fetchall()
         # fetchall does not need to use cur2
         for result in results:
@@ -300,10 +311,12 @@ def put_album_file(self, album_rec: str, file_rec: str):
             "?) ;",
             (album_rec, file_rec))

+
     def remove_all_album_files(self):
         # noinspection SqlWithoutWhere
         self.cur.execute("DELETE FROM AlbumFiles")

+
     # ---- LocalFiles Queries -------------------------------------------

     def get_missing_paths(self):
@@ -317,6 +330,7 @@ def get_missing_paths(self):
                 pth = Path(r.relative_path.parent / r.filename)
                 yield pth

+
     def get_duplicates(self):
         self.cur2.execute(Queries.duplicate_files)
         while True:
@@ -328,6 +342,7 @@ def get_duplicates(self):
                 pth = r.relative_path.parent / r.filename
                 yield r.id, pth

+
     def get_extra_paths(self):
         self.cur2.execute(Queries.extra_files)
         while True:
@@ -339,13 +354,15 @@ def get_extra_paths(self):
                 pth = r.relative_path.parent / r.filename
                 yield pth

+
     def local_exists(self, file_name: str, path: str):
         self.cur.execute(
             "SELECT COUNT() FROM main.LocalFiles WHERE FileName = ?"
             "AND PATH = ?;", (file_name, path))
         result = int(self.cur.fetchone()[0])
         return result

+
     def find_local_matches(self):
         # noinspection SqlWithoutWhere
         for q in Queries.match:
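The new Downloaded column is what lets create_album_content_links() process each album only once: get_album_files() now skips albums already flagged unless download_again is requested, and put_album_downloaded() sets the flag. A self-contained sqlite3 sketch of that round trip, using a toy table rather than the project's full schema:

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute("CREATE TABLE Albums (RemoteId TEXT, AlbumName TEXT, Downloaded INT DEFAULT 0)")
    con.execute("INSERT INTO Albums VALUES ('a1', 'Clones', 0), ('a2', 'Holiday', 1)")

    # equivalent of get_album_files(download_again=False): only un-linked albums
    rows = con.execute(
        "SELECT RemoteId FROM Albums WHERE RemoteId LIKE ? AND Downloaded==0",
        ('%',)).fetchall()
    print(rows)  # [('a1',)] -- 'Holiday' is skipped because it was already linked

    # equivalent of put_album_downloaded('a1') once the album's links are in place
    con.execute("UPDATE Albums SET Downloaded=? WHERE RemoteId IS ?", (True, 'a1'))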
2 changes: 1 addition & 1 deletion gphotos/Main.py
@@ -146,7 +146,7 @@ def setup(self, args: Namespace, db_path: Path):
             self.google_photos_client, root_folder, self.data_store)
         self.google_albums_sync = GoogleAlbumsSync(
             self.google_photos_client, root_folder, self.data_store,
-            args.flush_index)
+            args.flush_index or args.retry_download)
         if args.compare_folder:
             self.local_files_scan = LocalFilesScan(
                 root_folder, compare_folder, self.data_store)
1 change: 0 additions & 1 deletion gphotos/authorize.py
@@ -4,7 +4,6 @@
 from urllib3.util.retry import Retry
 from typing import List, Optional

-# from yaml import safe_load, safe_dump, YAMLError
 from json import load, dump, JSONDecodeError
 import logging

3 changes: 2 additions & 1 deletion gphotos/sql/gphotos_create.sql
@@ -8,7 +8,8 @@ create table Albums
     Description TEXT,
     StartDate INT,
     EndDate INT,
-    SyncDate INT
+    SyncDate INT,
+    Downloaded INT DEFAULT 0
 )
 ;
 DROP INDEX IF EXISTS Albums_RemoteId_uindex;
2 changes: 1 addition & 1 deletion test/test_credentials/.gphotos.token
@@ -1 +1 @@
-{"access_token": "ya29.Gly6Bnafi7sFrUvsRPk1AI2gflhbzj46RSKXDeXInYFJwUSNxapvVAMKowNV9UdeV7yXBzXxR3Rn-jAiRjKDbQ0APedqaXFkf1c70zdzE1HrBKlBrtl1t7moKcr-jw", "expires_in": 3600, "scope": ["https://www.googleapis.com/auth/photoslibrary.readonly", "https://www.googleapis.com/auth/photoslibrary.sharing"], "token_type": "Bearer", "expires_at": 1550994489.6308305, "refresh_token": "1/M7aqfHSQhPxs--IrYglfFeCTcnwRWruYmExnsuh0bbw"}
+{"access_token": "ya29.Gly8BocQdvTgBUPw2Do9Ji302NQBhm3jBZ0S1daFqsZ759DpszWz0bS0GeeL19LckDKbPbg8rdhj55Gd4qv8IEoKQkOnZq0NNqU9kgOO5cjgh6tG0t9nVkWW6Np46g", "expires_in": 3600, "scope": ["https://www.googleapis.com/auth/photoslibrary.sharing", "https://www.googleapis.com/auth/photoslibrary.readonly"], "token_type": "Bearer", "expires_at": 1551140160.0511432, "refresh_token": "1/HG0feqbbu7FZLjztEbGneV0Jz2aNoiNYuFIHvcZ9MgQ"}
26 changes: 13 additions & 13 deletions test/test_credentials/client_secret.json
@@ -1,14 +1,14 @@
-{
-  "installed": {
-    "client_id": "835058714377-ist0b356u0srf2d5n5dael8t2kes7419.apps.googleusercontent.com",
-    "project_id": "gktest64",
-    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
-    "token_uri": "https://oauth2.googleapis.com/token",
-    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
-    "client_secret": "PBZGWNpqQGHld2F-JQ3aiT5P",
-    "redirect_uris": [
-      "urn:ietf:wg:oauth:2.0:oob",
-      "http://localhost"
-    ]
-  }
+{
+  "installed": {
+    "client_id": "434668126185-isurs73113r7iafhra2tlkpjsoormvie.apps.googleusercontent.com",
+    "project_id": "gk-photos-test",
+    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+    "token_uri": "https://oauth2.googleapis.com/token",
+    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+    "client_secret": "3KuGrmcJWy4KZk_47tsDpLGj",
+    "redirect_uris": [
+      "urn:ietf:wg:oauth:2.0:oob",
+      "http://localhost"
+    ]
+  }
 }
2 changes: 1 addition & 1 deletion test/test_setup.py
@@ -22,7 +22,7 @@ def __init__(self):
         # set up the test account credentials
         Main.APP_NAME = 'gphotos-sync-test'
         app_dirs = AppDirs(Main.APP_NAME)
-        self.test_folder = Path(__file__).absolute().parent /'test_credentials'
+        self.test_folder = Path(__file__).absolute().parent / 'test_credentials'
         user_data = Path(app_dirs.user_data_dir)
         if not user_data.exists():
             user_data.mkdir(parents=True)
43 changes: 43 additions & 0 deletions test/test_system.py
@@ -76,6 +76,49 @@ def test_sys_whole_library(self):
         s.test_setup('test_sys_whole_library')
         s.gp.start(s.parsed_args)

+    def test_sys_album_add_file(self):
+        """tests that the album links get re-created in a new folder with
+        a new last-date prefix when a recent photo is added to an album,
+        also that the old folder is removed """
+        s = ts.SetupDbAndCredentials()
+        args = ['--start-date', '2017-09-19', '--end-date', '2017-09-20']
+        s.test_setup('test_sys_album_add_file', args=args, trash_db=True,
+                     trash_files=True)
+        s.gp.start(s.parsed_args)
+
+        # the date will be picked from the album contents which still includes
+        # the file that is not yet downloaded
+        pat = str(albums_root / '2017' / '0923 Clones' / '*.*')
+        files = sorted(s.root.glob(pat))
+        self.assertEqual(3, len(files))
+
+        # spoof the album to pretend it only got 3 files up to 2017-09-20
+        db = LocalData(s.root)
+        db.cur.execute("UPDATE Albums SET EndDate='2017-09-20',"
+                       "Size=3 WHERE "
+                       "AlbumName='Clones'")
+        db.store()
+
+        args = ['--start-date', '2017-09-19', '--end-date', '2017-09-23',
+                '--index-only']
+        s.test_setup('test_sys_album_add_file', args=args)
+        s.gp.start(s.parsed_args)
+
+        # the rescan will reset the date so set it back
+        db = LocalData(s.root)
+        db.cur.execute("UPDATE Albums SET EndDate='2017-09-20' "
+                       "WHERE AlbumName='Clones'")
+        db.store()
+
+        args = ['--skip-index', '--skip-files']
+        s.test_setup('test_sys_album_add_file', args=args)
+        s.gp.start(s.parsed_args)
+
+        pat = str(albums_root / '2017' / '0920 Clones' / '*.*')
+        files = sorted(s.root.glob(pat))
+        self.assertEqual(4, len(files))
+        self.assertFalse((albums_root / '2017' / '0923 Clones').exists())
+
     def test_system_date_range(self):
         s = ts.SetupDbAndCredentials()
         args = ['--start-date', '2016-01-01', '--end-date', '2017-01-01',