Commit

Merge branch 'dev' into main
helgeerbe committed Apr 2, 2021
2 parents 6236fa4 + ac7e296 commit de3710a
Showing 8 changed files with 158 additions and 113 deletions.
2 changes: 2 additions & 0 deletions README.md
@@ -47,6 +47,8 @@ When I started 2019 my DIY project building a raspberry powered digital picture
Please note that PictureFrame may change significantly during its development.
Bug reports, comments, feature requests and fixes are most welcome!

To find out what's new or improved, have a look at the [changelog](https://github.com/helgeerbe/picframe/wiki/Changelog).

## Acknowledgement

Many Thanks to Wolfgang [www.thedigitalpictureframe.com](https://www.thedigitalpictureframe.com/) for your inspiring work.
9 changes: 7 additions & 2 deletions picframe/config/configuration_example.yaml
@@ -15,7 +15,7 @@ viewer:
text_justify: "L" # text justification L, C or R
fit: False # default=False, True => scale image so all visible and leave 'gaps'
# False => crop image so no 'gaps'
auto_resize: True # default=True, set this to False if you want to use 4K resolution on Raspberry Pi 4.
#auto_resize: True # default=True, set this to False if you want to use 4K resolution on Raspberry Pi 4.
# You should ensure your images are the correct size for the display
kenburns: False # default=False, will set fit->False and blur_edges->False
display_x: 0 # offset from left of screen (can be negative)
@@ -35,7 +35,7 @@ viewer:
inner_mat_use_texture: False # default=False, True uses a texture for the inner mat. False creates a solid-color inner mat.
mat_resource_folder: "~/picframe_data/data/mat" # Folder containing mat image files

codepoints: "1234567890AÄÀÆÅÃBCÇDÈÉÊEËFGHIÏÍJKLMNÑOÓÖÔŌØPQRSTUÚÙÜVWXYZaáàãæåäbcçdeéèêëfghiíïjklmnñoóôōøöpqrsßtuúüvwxyz., _-+*()&/`´'•" # limit to 121 ie 11x11 grid_size
#codepoints: "1234567890AÄÀÆÅÃBCÇDÈÉÊEËFGHIÏÍJKLMNÑOÓÖÔŌØPQRSTUÚÙÜVWXYZaáàãæåäbcçdeéèêëfghiíïjklmnñoóôōøöpqrsßtuúüvwxyz., _-+*()&/`´'•" # limit to 121 ie 11x11 grid_size

model:
pic_dir: "~/Pictures" # default="~/Pictures", root folder for images
@@ -47,6 +47,11 @@ model:
time_delay: 200.0 # default=200.0, time between consecutive slide starts - can be changed by MQTT
fade_time: 10.0 # default=10.0, change time during which slides overlap - can be changed by MQTT
shuffle: True # default=True, shuffle on reloading image files - can be changed by MQTT
sort_cols: 'fname ASC' # default='fname ASC', can be any columns in the table, each with optional ASC or DESC, separated by commas
# fname, last_modified, file_id, orientation, exif_datetime, f_number,
# exposure_time, iso, focal_length, make, model, lens, rating,
# latitude, longitude, width, height, title, caption, tags,
# is_portrait, location
image_attr: [ # image attributes sent by MQTT, keys are taken from the exifread library, "PICFRAME GPS" is special to retrieve GPS lon/lat, "PICFRAME LOCATION" is special to retrieve the reverse geocoded location (load_geoloc has to be True)
"PICFRAME GPS",
"PICFRAME LOCATION",
36 changes: 30 additions & 6 deletions picframe/controller.py
@@ -113,7 +113,7 @@ def date_from(self, val):
try:
self.__date_from = float(val)
except ValueError:
self.__date_from = make_date(val if len(val) > 0 else '1970/1/1')
self.__date_from = make_date(val if len(val) > 0 else '1901/12/15')
if len(val) > 0:
self.__model.set_where_clause('date_from', "exif_datetime > {:.0f}".format(self.__date_from))
else:
@@ -194,26 +194,50 @@ def location_filter(self):
def location_filter(self, val):
self.__location_filter = val
if len(val) > 0:
self.__model.set_where_clause('location_filter', "location LIKE '%{}%'".format(val))
self.__model.set_where_clause("location_filter", self.__build_filter(val, "location"))
else:
self.__model.set_where_clause('location_filter') # remove from where_clause
self.__model.set_where_clause("location_filter") # remove from where_clause
self.__model.force_reload()
self.__next_tm = 0

@property
def tags_filter(self):
return self.__tags_filter

@tags_filter.setter
def tags_filter(self, val):
self.__tags_filter = val
if len(val) > 0:
self.__model.set_where_clause('tags_filter', "tags LIKE '%{}%'".format(val))
self.__model.set_where_clause("tags_filter", self.__build_filter(val, "tags"))
else:
self.__model.set_where_clause('tags_filter') # remove from where_clause
self.__model.set_where_clause("tags_filter") # remove from where_clause
self.__model.force_reload()
self.__next_tm = 0

def __build_filter(self, val, field):
if val.count("(") != val.count(")"):
return None # this should clear the filter and not raise an error
val = val.replace(";", "").replace("'", "").replace("%", "").replace('"', '') # SQL scrambling
tokens = ("(", ")", "AND", "OR", "NOT") # now copes with NOT
val_split = val.replace("(", " ( ").replace(")", " ) ").split() # so brackets not joined to words
filter = []
last_token = ""
for s in val_split:
s_upper = s.upper()
if s_upper in tokens:
if s_upper in ("AND", "OR"):
if last_token in ("AND", "OR"):
return None # must have a non-token between
last_token = s_upper
filter.append(s)
else:
if last_token is not None:
filter.append("{} LIKE '%{}%'".format(field, s))
else:
filter[-1] = filter[-1].replace("%'", " {}%'".format(s))
last_token = None
return "({})".format(" ".join(filter)) # if OR outside brackets will modify the logic of rest of where clauses

def text_is_on(self, txt_key):
return self.__viewer.text_is_on(txt_key)

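
Note: this hunk replaces the plain substring match previously used for the location filter with a small expression parser, __build_filter, now shared by the location and tags filters. It strips characters that could break out of the LIKE pattern, turns each bare word into a LIKE clause, folds consecutive words into a single phrase, and returns None for malformed input so the filter is simply cleared. The sketch below is a standalone version of that logic with a few assert-style expectations; the name build_filter and the sample filter strings are illustrative assumptions, not part of the commit.

    def build_filter(val, field):
        if val.count("(") != val.count(")"):
            return None  # unbalanced brackets clear the filter rather than raising
        # strip characters that could break out of the LIKE pattern
        val = val.replace(";", "").replace("'", "").replace("%", "").replace('"', '')
        tokens = ("(", ")", "AND", "OR", "NOT")
        val_split = val.replace("(", " ( ").replace(")", " ) ").split()
        parts = []
        last_token = ""
        for s in val_split:
            s_upper = s.upper()
            if s_upper in tokens:
                if s_upper in ("AND", "OR") and last_token in ("AND", "OR"):
                    return None  # two operators in a row is invalid
                last_token = s_upper
                parts.append(s)
            else:
                if last_token is not None:
                    parts.append("{} LIKE '%{}%'".format(field, s))
                else:
                    # consecutive plain words are folded into a single phrase
                    parts[-1] = parts[-1].replace("%'", " {}%'".format(s))
                last_token = None
        return "({})".format(" ".join(parts))

    assert build_filter("new york", "location") == "(location LIKE '%new york%')"
    assert build_filter("beach AND (2019 OR 2020)", "tags") == \
        "(tags LIKE '%beach%' AND ( tags LIKE '%2019%' OR tags LIKE '%2020%' ))"
    assert build_filter("beach AND AND sea", "tags") is None
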
1 change: 1 addition & 0 deletions picframe/html/index.html
@@ -36,6 +36,7 @@
"brightness": {type:"number", fn:"setter", val:0},
"subdirectory": {type:"text", fn:"setter", val:""},
"location_filter": {type:"text", fn:"setter", val:""},
"tags_filter": {type:"text", fn:"setter", val:""},
"delete": {type:"action", fn:"delete={}", val:false},
"stop": {type:"action", fn:"stop={}", val:false},
};
80 changes: 47 additions & 33 deletions picframe/image_cache.py
@@ -95,47 +95,55 @@ def update_cache(self):
self.__db.commit()


def query_cache(self, where_clause, sort_clause = 'exif_datetime ASC'):
def query_cache(self, where_clause, sort_clause = 'fname ASC'):
cursor = self.__db.cursor()
cursor.row_factory = None # we don't want the "sqlite3.Row" setting from the db here...

if not self.__portrait_pairs: # TODO SQL injection? Does it matter in this app?
sql = """SELECT file_id FROM all_data WHERE {0} ORDER BY {1}
""".format(where_clause, sort_clause)
return cursor.execute(sql).fetchall()
else: # make two SELECTS
sql = """SELECT
CASE
WHEN is_portrait = 0 THEN file_id
ELSE -1
END
FROM all_data WHERE {0} ORDER BY {1}
""".format(where_clause, sort_clause)
full_list = cursor.execute(sql).fetchall()
sql = """SELECT file_id FROM all_data
WHERE ({0}) AND is_portrait = 1 ORDER BY {1}
""".format(where_clause, sort_clause)
pair_list = cursor.execute(sql).fetchall()
newlist = []
for i in range(len(full_list)):
if full_list[i][0] != -1:
newlist.append(full_list[i])
elif pair_list: # OK @rec - this is tidier and quicker!
elem = pair_list.pop(0)
if pair_list:
elem += pair_list.pop(0)
newlist.append(elem)
return newlist
try:
if not self.__portrait_pairs: # TODO SQL injection? Does it matter in this app?
sql = """SELECT file_id FROM all_data WHERE {0} ORDER BY {1}
""".format(where_clause, sort_clause)
return cursor.execute(sql).fetchall()
else: # make two SELECTS
sql = """SELECT
CASE
WHEN is_portrait = 0 THEN file_id
ELSE -1
END
FROM all_data WHERE {0} ORDER BY {1}
""".format(where_clause, sort_clause)
full_list = cursor.execute(sql).fetchall()
sql = """SELECT file_id FROM all_data
WHERE ({0}) AND is_portrait = 1 ORDER BY {1}
""".format(where_clause, sort_clause)
pair_list = cursor.execute(sql).fetchall()
newlist = []
for i in range(len(full_list)):
if full_list[i][0] != -1:
newlist.append(full_list[i])
elif pair_list: # OK @rec - this is tidier and quicker!
elem = pair_list.pop(0)
if pair_list:
elem += pair_list.pop(0)
newlist.append(elem)
return newlist
except:
return []


def get_file_info(self, file_id):
if not file_id: return None
sql = "SELECT * FROM all_data where file_id = {0}".format(file_id)
row = self.__db.execute(sql).fetchone()
if row is not None and row['latitude'] is not None and row['longitude'] is not None and row['location'] is None:
if self.__get_geo_location(row['latitude'], row['longitude']):
row = self.__db.execute(sql).fetchone() # description inserted in table
return row # NB if select fails (i.e. moved file) will return None

def get_column_names(self):
sql = "PRAGMA table_info(all_data)"
rows = self.__db.execute(sql).fetchall()
return [row['name'] for row in rows]

def __get_geo_location(self, lat, lon): # TODO periodically check all lat/lon in meta with no location and try again
location = self.__geo_reverse.get_address(lat, lon)
if len(location) == 0:
@@ -283,9 +291,9 @@ def __get_modified_folders(self):
def __get_modified_files(self, modified_folders):
out_of_date_files = []
sql_select = "SELECT fname, last_modified FROM all_data WHERE fname = ? and last_modified >= ?"
for dir,date in modified_folders:
for dir,_date in modified_folders:
for file in os.listdir(dir):
base, extension = os.path.splitext(file)
_base, extension = os.path.splitext(file)
if (extension.lower() in ImageCache.EXTENSIONS
and not '.AppleDouble' in dir and not file.startswith('.')): # have to filter out all the Apple junk
full_file = os.path.join(dir, file)
@@ -376,13 +384,19 @@ def __get_exif_info(self, file_path_name):
e['focal_length'] = exifs.get_exif('EXIF FocalLength')
e['rating'] = exifs.get_exif('EXIF Rating')
e['lens'] = exifs.get_exif('EXIF LensModel')
e['exif_datetime'] = None
val = exifs.get_exif('EXIF DateTimeOriginal')
if val != None:
# Remove any subsecond portion of the DateTimeOriginal value. According to the spec, it's
# not valid here anyway (should be in SubSecTimeOriginal), but it does exist sometimes.
val = val.split('.', 1)[0]
e['exif_datetime'] = time.mktime(time.strptime(val, '%Y:%m:%d %H:%M:%S'))
else:
try:
e['exif_datetime'] = time.mktime(time.strptime(val, '%Y:%m:%d %H:%M:%S'))
except:
pass

# If we still don't have a date/time, just use the file's modification time
if e['exif_datetime'] == None:
e['exif_datetime'] = os.path.getmtime(file_path_name)

gps = exifs.get_location()
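
Note: __get_exif_info now initialises exif_datetime to None, strips any non-standard subsecond suffix from EXIF DateTimeOriginal, tolerates values that still fail to parse, and falls back to the file's modification time. A minimal standalone sketch of that fallback, assuming the raw tag value has already been read; the function name and arguments are illustrative, not part of the commit.

    import os
    import time

    def exif_timestamp(raw_value, file_path):
        # raw_value is the 'EXIF DateTimeOriginal' string, e.g. '2021:04:02 10:15:30.123'
        ts = None
        if raw_value is not None:
            raw_value = raw_value.split('.', 1)[0]  # drop subsecond part, not valid here per spec
            try:
                ts = time.mktime(time.strptime(raw_value, '%Y:%m:%d %H:%M:%S'))
            except ValueError:
                pass  # malformed date string, fall through to the file's mtime
        if ts is None:
            ts = os.path.getmtime(file_path)  # fall back to file modification time
        return ts
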
26 changes: 21 additions & 5 deletions picframe/model.py
@@ -26,7 +26,7 @@
'show_text': "name location",
'text_justify': 'L',
'fit': False,
'auto_resize': True,
#'auto_resize': True,
'kenburns': False,
'display_x': 0,
'display_y': 0,
@@ -43,7 +43,7 @@
'inner_mat_use_texture': False,
'outer_mat_use_texture': True,
'mat_resource_folder': '~/picframe_data/data/mat',
'codepoints': "1234567890AÄÀÆÅÃBCÇDÈÉÊEËFGHIÏÍJKLMNÑOÓÖÔŌØPQRSTUÚÙÜVWXYZaáàãæåäbcçdeéèêëfghiíïjklmnñoóôōøöpqrsßtuúüvwxyz., _-+*()&/`´'•" # limit to 121 ie 11x11 grid_size
#'codepoints': "1234567890AÄÀÆÅÃBCÇDÈÉÊEËFGHIÏÍJKLMNÑOÓÖÔŌØPQRSTUÚÙÜVWXYZaáàãæåäbcçdeéèêëfghiíïjklmnñoóôōøöpqrsßtuúüvwxyz., _-+*()&/`´'•" # limit to 121 ie 11x11 grid_size
},
'model': {

@@ -55,6 +55,7 @@
'time_delay': 200.0,
'fade_time': 10.0,
'shuffle': True,
'sort_cols': 'fname ASC',
'image_attr': ['PICFRAME GPS'], # image attributes sent by MQTT, keys are taken from the exifread library, 'PICFRAME GPS' is special to retrieve GPS lon/lat
'load_geoloc': True,
'locale': 'en_US.utf8',
@@ -160,6 +161,8 @@ def __init__(self, configfile = DEFAULT_CONFIGFILE):
model_config['portrait_pairs'])
self.__deleted_pictures = model_config['deleted_pictures']
self.__no_files_img = os.path.expanduser(model_config['no_files_img'])
self.__sort_cols = model_config['sort_cols']
self.__col_names = None
self.__where_clauses = {} # these will be modified by controller

def get_viewer_config(self):
@@ -245,6 +248,7 @@ def get_directory_list(self):
if self.subdirectory != '':
actual_dir = self.subdirectory
subdir_list = next(os.walk(self.__pic_dir))[1]
subdir_list[:] = [d for d in subdir_list if not d[0] == '.']
subdir_list.insert(0,root)
return actual_dir, subdir_list

Expand All @@ -256,7 +260,7 @@ def set_next_file_to_previous_file(self):

def get_next_file(self):
if self.__reload_files:
for i in range(5): # give image_cache chance on first load if a large directory
for _i in range(5): # give image_cache chance on first load if a large directory
self.__get_files()
if self.__number_of_files > 0:
break
@@ -285,7 +289,13 @@ def get_next_file(self):
return self.__current_pics

def get_number_of_files(self):
return self.__number_of_files
#return self.__number_of_files
#return sum(1 for pics in self.__file_list for pic in pics if pic is not None)
# or
return sum(
sum(1 for pic in pics if pic is not None)
for pics in self.__file_list
)

def get_current_pics(self):
return self.__current_pics
@@ -329,7 +339,13 @@ def __get_files(self):
if self.shuffle:
sort_list.append("RANDOM()")
else:
sort_list.append("exif_datetime ASC")
if self.__col_names is None:
self.__col_names = self.__image_cache.get_column_names() # do this once
for col in self.__sort_cols.split(","):
colsplit = col.split()
if colsplit[0] in self.__col_names and (len(colsplit) == 1 or colsplit[1].upper() in ("ASC", "DESC")):
sort_list.append(col)
sort_list.append("fname ASC") # always finally sort on this in case nothing else to sort on or sort_cols is ""
sort_clause = ",".join(sort_list)

self.__file_list = self.__image_cache.query_cache(where_clause, sort_clause)
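
Note: together with the new sort_cols option in the configuration and the get_column_names helper in the image cache, __get_files now builds the ORDER BY clause from the configured string, keeping only entries whose column actually exists and whose optional direction is ASC or DESC, and always appends "fname ASC" as a final tiebreaker (RANDOM() still takes over when shuffle is on). The sketch below mirrors that logic as a standalone function; the function name and the sample column list are assumptions for illustration.

    def build_sort_clause(sort_cols, col_names, shuffle=False):
        sort_list = []
        if shuffle:
            sort_list.append("RANDOM()")
        else:
            for col in sort_cols.split(","):
                colsplit = col.split()
                # keep "name" or "name ASC|DESC" only if the column really exists
                if colsplit and colsplit[0] in col_names and (
                        len(colsplit) == 1 or colsplit[1].upper() in ("ASC", "DESC")):
                    sort_list.append(col)
            sort_list.append("fname ASC")  # final tiebreaker, also covers an empty sort_cols
        return ",".join(sort_list)

    # e.g. build_sort_clause("rating DESC", ["fname", "exif_datetime", "rating"])
    # returns "rating DESC,fname ASC"
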