Merge remote-tracking branch 'origin/dev'
helgeerbe committed Aug 30, 2021
2 parents 07bbaaf + ac0c85c commit 64e1fa9
Showing 12 changed files with 260 additions and 268 deletions.
5 changes: 1 addition & 4 deletions picframe/controller.py
@@ -297,10 +297,7 @@ def loop(self): #TODO exit loop gracefully and call image_cache.stop()
field_name = self.__model.EXIF_TO_FIELD[key]
image_attr[key] = pics[0].__dict__[field_name] #TODO nicer using namedtuple for Pic
self.publish_state(pics[0].fname, image_attr)
if self.__viewer.is_in_transition() == False: # safe to do long running tasks
self.__model.pause_looping(True)
else:
self.__model.pause_looping(False) #TODO only need to set this once rather than every loop
self.__model.pause_looping = self.__viewer.is_in_transition()
(loop_running, skip_image) = self.__viewer.slideshow_is_running(pics, time_delay, fade_time, self.__paused)
if not loop_running:
break
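
The replaced if/else treated pause_looping as a method; the new one-liner assigns the transition state directly, which implies pause_looping is a settable property on the model. A minimal sketch of that shape (hypothetical internals, not taken from this diff):

# Hypothetical sketch of the model-side property the new one-liner assumes;
# the real picframe model may differ.
class Model:
    def __init__(self):
        self.__paused_looping = False

    @property
    def pause_looping(self):
        return self.__paused_looping

    @pause_looping.setter
    def pause_looping(self, value):
        # While the viewer is mid-transition, pausing keeps long-running
        # work (cache updates, geo lookups) off the render path.
        self.__paused_looping = bool(value)

With the property in place, the controller no longer needs a branch every loop; it just mirrors the viewer's transition state.
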
294 changes: 92 additions & 202 deletions picframe/data/fonts/LICENSE.txt

Large diffs are not rendered by default.

Binary file modified picframe/data/fonts/NotoSans-Bold.ttf
Binary file not shown.
Binary file modified picframe/data/fonts/NotoSans-BoldItalic.ttf
Binary file not shown.
Binary file modified picframe/data/fonts/NotoSans-Italic.ttf
Binary file not shown.
Binary file modified picframe/data/fonts/NotoSans-Regular.ttf
Binary file not shown.
38 changes: 31 additions & 7 deletions picframe/image_cache.py
@@ -27,6 +27,8 @@ def __init__(self, picture_dir, db_file, geo_reverse, portrait_pairs=False):
# different version from the latest one - should this argument be taken out?
self.__modified_folders = []
self.__modified_files = []
self.__cached_file_stats = [] # collection shared between threads
self.__cached_file_stats_lock = threading.Lock() # lock to manage shared collection
self.__logger = logging.getLogger("image_cache.ImageCache")
self.__logger.debug('Creating an instance of ImageCache')
self.__picture_dir = picture_dir
@@ -52,6 +54,7 @@ def __loop(self):
self.update_cache()
time.sleep(2.0)
time.sleep(0.01)
self.__update_file_stats() # write any unsaved file stats before closing
self.__db.commit() # close after update_cache finished for last time
self.__db.close()
self.__shutdown_completed = True
@@ -75,6 +78,10 @@ def update_cache(self):

self.__logger.debug('Updating cache')

# Update any cached file stats. This should be really light-weight
# so just process any new stats in every pass...
self.__update_file_stats()

# If the current collection of updated files is empty, check for disk-based changes
if not self.__modified_files:
self.__logger.debug('No unprocessed files in memory, checking disk')
@@ -123,13 +130,20 @@ def query_cache(self, where_clause, sort_clause = 'fname ASC'):
""".format(where_clause, sort_clause)
pair_list = cursor.execute(sql).fetchall()
newlist = []
skip_portrait_slot = False
for i in range(len(full_list)):
if full_list[i][0] != -1:
newlist.append(full_list[i])
elif skip_portrait_slot:
skip_portrait_slot = False
continue
elif pair_list:
elem = pair_list.pop(0)
if pair_list:
elem += pair_list.pop(0)
# Here, we just doubled-up a set of portrait images.
# Skip the next available "portrait slot" as it's unneeded.
skip_portrait_slot = True
newlist.append(elem)
return newlist
except:
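
The skip_portrait_slot flag added above doubles up two portrait images into one slot and then discards the next portrait placeholder so the pair is not scheduled twice. A standalone sketch of the same idea on made-up data (the -1 marker and tuple shapes are inferred from the surrounding code):

# Made-up data: -1 marks a "portrait slot", other entries are landscape ids.
full_list = [(1,), (-1,), (2,), (-1,), (3,)]
pair_list = [(10,), (11,)]           # portrait ids waiting to be paired

newlist = []
skip_portrait_slot = False
for entry in full_list:
    if entry[0] != -1:
        newlist.append(entry)        # landscape image, keep as-is
    elif skip_portrait_slot:
        skip_portrait_slot = False   # slot already consumed by the previous pair
    elif pair_list:
        elem = pair_list.pop(0)
        if pair_list:
            elem += pair_list.pop(0)     # show two portraits side by side
            skip_portrait_slot = True    # and drop the next placeholder
        newlist.append(elem)

print(newlist)   # [(1,), (10, 11), (2,), (3,)]
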
@@ -143,20 +157,30 @@ def get_file_info(self, file_id):
if row is not None and row['latitude'] is not None and row['longitude'] is not None and row['location'] is None:
if self.__get_geo_location(row['latitude'], row['longitude']):
row = self.__db.execute(sql).fetchone() # description inserted in table
# Update the file's displayed stats
self.__update_file_stats(file_id)
self.__add_file_to_stats_cache(file_id) # Add a record to the file stats cache collection
return row # NB if select fails (i.e. moved file) will return None

def get_column_names(self):
sql = "PRAGMA table_info(all_data)"
rows = self.__db.execute(sql).fetchall()
return [row['name'] for row in rows]

def __update_file_stats(self, file_id):
# Increment the displayed count for the specified file
# Update the last displayed time for the specified file to "now"
sql = "UPDATE file SET displayed_count = displayed_count + 1, last_displayed = strftime('%s','now') WHERE file_id = ?"
self.__db.execute(sql, (file_id,))
def __add_file_to_stats_cache(self, file_id):
# This collection is shared between threads, so lock it to update
self.__cached_file_stats_lock.acquire()
self.__cached_file_stats.append([file_id, time.time()])
self.__cached_file_stats_lock.release()

def __update_file_stats(self):
# Process (and drain) the entire collection of cached file stats by storing them in the db
# Note, this is likely an empty or very small collection
if self.__cached_file_stats:
sql = "UPDATE file SET displayed_count = displayed_count + 1, last_displayed = ? WHERE file_id = ?"
self.__cached_file_stats_lock.acquire()
while self.__cached_file_stats:
file_id, timestamp = self.__cached_file_stats.pop()
self.__db.execute(sql, (timestamp, file_id))
self.__cached_file_stats_lock.release()

def __get_geo_location(self, lat, lon): # TODO periodically check all lat/lon in meta with no location and try again
location = self.__geo_reverse.get_address(lat, lon)
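
With this change, display stats are queued wherever an image is handed out and only written to the database during the cache's own update pass, so the per-image path stays cheap and the writes are batched with the existing commit. A minimal sketch of the queue-and-drain pattern (simplified names, using with-blocks instead of explicit acquire/release):

import threading
import time

class StatsQueue:
    """Sketch of the lock-protected queue-and-drain pattern above."""

    def __init__(self):
        self._items = []
        self._lock = threading.Lock()

    def add(self, file_id):
        # Called when an image is shown: just record the event, no DB access.
        with self._lock:
            self._items.append((file_id, time.time()))

    def drain(self, db):
        # Called from the cache loop that owns the database connection.
        with self._lock:
            pending, self._items = self._items, []
        for file_id, ts in pending:
            db.execute(
                "UPDATE file SET displayed_count = displayed_count + 1, "
                "last_displayed = ? WHERE file_id = ?",
                (ts, file_id),
            )

Swapping the list out while holding the lock keeps the critical section short instead of holding the lock for the duration of the database writes.
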
107 changes: 78 additions & 29 deletions picframe/interface_mqtt.py
@@ -69,26 +69,32 @@ def on_connect(self, client, userdata, flags, rc):
self.__logger.info('Connected with mqtt broker')

sensor_topic_head = "homeassistant/sensor/" + self.__device_id
number_topic_head = "homeassistant/number/" + self.__device_id
select_topic_head = "homeassistant/select/" + self.__device_id
switch_topic_head = "homeassistant/switch/" + self.__device_id

# send last will and testament
available_topic = switch_topic_head + "/available"
client.publish(available_topic, "online", qos=0, retain=True)

# state_topic for all picframe sensors
state_topic = sensor_topic_head + "/state"

## sensors
self.__setup_sensor(client, sensor_topic_head, "date_from", "mdi:calendar-arrow-left", available_topic)
self.__setup_sensor(client, sensor_topic_head, "date_to", "mdi:calendar-arrow-right", available_topic)
self.__setup_sensor(client, sensor_topic_head, "time_delay", "mdi:image-plus", available_topic)
self.__setup_sensor(client, sensor_topic_head, "brightness", "mdi:brightness-6", available_topic)
self.__setup_sensor(client, sensor_topic_head, "fade_time", "mdi:image-size-select-large", available_topic)
self.__setup_sensor(client, sensor_topic_head, "location_filter", "mdi:map-search", available_topic)
self.__setup_sensor(client, sensor_topic_head, "tags_filter", "mdi:image-search", available_topic)
self.__setup_sensor(client, sensor_topic_head, "image_counter", "mdi:camera-burst", available_topic)
self.__setup_sensor(client, sensor_topic_head, "image", "mdi:file-image", available_topic, has_attributes=True)
self.__setup_sensor(client, sensor_topic_head, "directory", "mdi:folder-multiple-image", available_topic, has_attributes=True)


## numbers
self.__setup_number(client, number_topic_head, "brightness", 0.0, 1.0, 0.1, "mdi:brightness-6", available_topic)
self.__setup_number(client, number_topic_head, "time_delay", 1, 400, 1, "mdi:image-plus", available_topic)
self.__setup_number(client, number_topic_head, "fade_time", 1, 50, 1,"mdi:image-size-select-large", available_topic)

## selects
_, dir_list = self.__controller.get_directory_list()
dir_list.sort()
self.__setup_select(client, select_topic_head, "directory", dir_list, "mdi:folder-multiple-image", available_topic)

## switches
self.__setup_switch(client, switch_topic_head, "_text_refresh", "mdi:refresh", available_topic)
Expand Down Expand Up @@ -142,6 +148,43 @@ def __setup_sensor(self, client, sensor_topic_head, topic, icon, available_topic
"dev":{"ids":[self.__device_id]}})
client.publish(config_topic, config_payload, qos=0, retain=True)
client.subscribe(self.__device_id + "/" + topic, qos=0)

def __setup_number(self, client, number_topic_head, topic, min, max, step, icon, available_topic):
config_topic = number_topic_head + "_" + topic + "/config"
command_topic = self.__device_id + "/" + topic
state_topic = "homeassistant/sensor/" + self.__device_id + "/state"
name = self.__device_id + "_" + topic
config_payload = json.dumps({"name": name,
"min": min,
"max": max,
"step": step,
"icon": icon,
"state_topic": state_topic,
"command_topic": command_topic,
"value_template": "{{ value_json." + topic + "}}",
"avty_t": available_topic,
"uniq_id": name,
"dev":{"ids":[self.__device_id]}})
client.publish(config_topic, config_payload, qos=0, retain=True)
client.subscribe(command_topic, qos=0)

def __setup_select(self, client, select_topic_head, topic, options, icon, available_topic):
config_topic = select_topic_head + "_" + topic + "/config"
command_topic = self.__device_id + "/" + topic
state_topic = "homeassistant/sensor/" + self.__device_id + "/state"
name = self.__device_id + "_" + topic

config_payload = json.dumps({"name": name,
"icon": icon,
"options": options,
"state_topic": state_topic,
"command_topic": command_topic,
"value_template": "{{ value_json." + topic + "}}",
"avty_t": available_topic,
"uniq_id": name,
"dev":{"ids":[self.__device_id]}})
client.publish(config_topic, config_payload, qos=0, retain=True)
client.subscribe(command_topic, qos=0)
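
For orientation, the discovery payload that __setup_number above publishes for the brightness entity looks roughly like this, assuming a device id of "picframe" (an illustrative value, not taken from this diff):

# Roughly what __setup_number publishes for "brightness" when the device id
# is "picframe" (illustrative values, not captured from a real broker):
import json

config_topic = "homeassistant/number/picframe_brightness/config"
config_payload = json.dumps({
    "name": "picframe_brightness",
    "min": 0.0, "max": 1.0, "step": 0.1,
    "icon": "mdi:brightness-6",
    "state_topic": "homeassistant/sensor/picframe/state",
    "command_topic": "picframe/brightness",
    "value_template": "{{ value_json.brightness}}",
    "avty_t": "homeassistant/switch/picframe/available",
    "uniq_id": "picframe_brightness",
    "dev": {"ids": ["picframe"]},
})
# client.publish(config_topic, config_payload, qos=0, retain=True)
# client.subscribe("picframe/brightness", qos=0)

Home Assistant reads the current value from the shared sensor state topic via value_template and sends changes back on the command topic, which the frame subscribes to.
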

def __setup_switch(self, client, switch_topic_head, topic, icon,
available_topic, is_on=False):
@@ -328,44 +371,50 @@ def on_message(self, client, userdata, message):
self.__controller.stop()

def publish_state(self, image, image_attr):
topic_head = "homeassistant/sensor/" + self.__device_id
sensor_topic_head = "homeassistant/sensor/" + self.__device_id
switch_topic_head = "homeassistant/switch/" + self.__device_id
state_topic = topic_head + "/state"
state_payload = {}
select_topic_head = "homeassistant/select/" + self.__device_id
sensor_state_topic = sensor_topic_head + "/state"

sensor_state_payload = {}

## sensor
# directory sensor
actual_dir, dir_list = self.__controller.get_directory_list()
state_payload["directory"] = actual_dir
dir_attr = {}
dir_attr['directories'] = dir_list
sensor_state_payload["directory"] = actual_dir
# image counter sensor
state_payload["image_counter"] = str(self.__controller.get_number_of_files())
sensor_state_payload["image_counter"] = str(self.__controller.get_number_of_files())
# image sensor
_, tail = os.path.split(image)
state_payload["image"] = tail
sensor_state_payload["image"] = tail
# date_from
state_payload["date_from"] = int(self.__controller.date_from)
sensor_state_payload["date_from"] = int(self.__controller.date_from)
# date_to
state_payload["date_to"] = int(self.__controller.date_to)
sensor_state_payload["date_to"] = int(self.__controller.date_to)
# location_filter
sensor_state_payload["location_filter"] = self.__controller.location_filter
# tags_filter
sensor_state_payload["tags_filter"] = self.__controller.tags_filter

## number state
# time_delay
state_payload["time_delay"] = self.__controller.time_delay
sensor_state_payload["time_delay"] = self.__controller.time_delay
# fade_time
state_payload["fade_time"] = self.__controller.fade_time
sensor_state_payload["fade_time"] = self.__controller.fade_time
# brightness
state_payload["brightness"] = self.__controller.brightness
# location_filter
state_payload["location_filter"] = self.__controller.location_filter
# tags_filter
state_payload["tags_filter"] = self.__controller.tags_filter
sensor_state_payload["brightness"] = self.__controller.brightness

# send last will and testament
available_topic = switch_topic_head + "/available"
self.__client.publish(available_topic, "online", qos=0, retain=True)

# publish sensors
attributes_topic = topic_head + "_image/attributes"
attributes_topic = sensor_topic_head + "_image/attributes"
self.__logger.debug("Send image attributes: %s", image_attr)
self.__client.publish(attributes_topic, json.dumps(image_attr), qos=0, retain=False)
attributes_topic = topic_head + "_directory/attributes"
self.__client.publish(attributes_topic, json.dumps(dir_attr), qos=0, retain=False)
self.__logger.info("Send state: %s", state_payload)
self.__client.publish(state_topic, json.dumps(state_payload), qos=0, retain=False)
dir_list.sort()
self.__setup_select(self.__client, select_topic_head, "directory", dir_list, "mdi:folder-multiple-image", available_topic)

self.__logger.info("Send sensor state: %s", sensor_state_payload)
self.__client.publish(sensor_state_topic, json.dumps(sensor_state_payload), qos=0, retain=False)
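
All sensor and number values now travel in one JSON document on the shared sensor state topic, and each Home Assistant entity extracts its own field with its value_template. A sketch of one such publish (field values are made up):

# Example of the single state document published to
# homeassistant/sensor/<device_id>/state (values are made up):
sensor_state_payload = {
    "directory": "Holiday2021",
    "image_counter": "342",
    "image": "IMG_1234.jpg",
    "date_from": 0,
    "date_to": 1630368000,
    "location_filter": "",
    "tags_filter": "",
    "time_delay": 200.0,
    "fade_time": 10.0,
    "brightness": 1.0,
}
# self.__client.publish(sensor_state_topic, json.dumps(sensor_state_payload),
#                       qos=0, retain=False)
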

4 changes: 2 additions & 2 deletions picframe/mat_image.py
@@ -503,7 +503,7 @@ def __init__(self, k=3, max_iterations=5, min_distance=5.0, size=200):
def run(self, image, start_clusters=None):
image = image.copy()
image.thumbnail(self.size)
im = np.array(image, dtype=np.float)[:,:,:3]
im = np.array(image, dtype=float)[:,:,:3]
# following section can be used to give the clusters location as well as colour proximity
#(ix0, ix1) = np.indices(im.shape[:2]) # vert,horiz pixel locations
#ix0.shape = ix0.shape + (1,) # make same dim as im
@@ -516,7 +516,7 @@ def run(self, image, start_clusters=None):
if start_clusters is None:
centroids = im[np.random.choice(np.arange(n), self.k)]
else:
centroids = np.array(start_clusters, dtype=np.float)
centroids = np.array(start_clusters, dtype=float)
old_centroids = centroids.copy()
for i in range(self.max_iterations):
im.shape = (1, n, d) # add dimension to allow broadcasting
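
np.float was only an alias for the built-in float; NumPy deprecated it in 1.20 and removed it in later releases, so the change is behaviour-preserving. A quick check of the equivalence:

import numpy as np
from PIL import Image

# Both conversions produce float64 arrays; dtype=float is the drop-in
# replacement for the removed np.float alias.
img = Image.new("RGB", (4, 4))
im = np.array(img, dtype=float)[:, :, :3]         # what the diff now uses
im64 = np.array(img, dtype=np.float64)[:, :, :3]  # explicit-width alternative
assert im.dtype == im64.dtype == np.float64
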
77 changes: 55 additions & 22 deletions picframe/model.py
@@ -276,33 +276,66 @@ def set_next_file_to_previous_file(self):
self.__file_index = (self.__file_index - 2) % self.__number_of_files # TODO deleting last image results in ZeroDivisionError

def get_next_file(self):
if self.__reload_files:
for _i in range(5): # give image_cache chance on first load if a large directory
self.__get_files()
if self.__number_of_files > 0:
break
time.sleep(0.5)
if self.__file_index == self.__number_of_files:
self.__num_run_through += 1
if self.shuffle and self.__num_run_through >= self.get_model_config()['reshuffle_num']:
#self.__num_run_through = 0
#self.__shuffle_files()
missing_images = 0

# loop until we acquire a valid image set
while True:
pic1 = None
pic2 = None

# Reload the playlist if requested
if self.__reload_files:
for _i in range(5): # give image_cache chance on first load if a large directory
self.__get_files()
missing_images = 0
if self.__number_of_files > 0:
break
time.sleep(0.5)

# If we don't have any files to show, prepare the "no images" image
# Also, set the reload_files flag so we'll check for new files on the next pass...
if self.__number_of_files == 0 or missing_images >= self.__number_of_files:
pic1 = Pic(self.__no_files_img, 0, 0)
self.__reload_files = True
self.__file_index = 0
if self.__number_of_files == 0:
pic = Pic(self.__no_files_img, 0, 0)
paired_pic = None
else:
break

# If we've displayed all images...
# If it's time to shuffle, set a flag to do so
# Loop back, which will reload and shuffle if necessary
if self.__file_index == self.__number_of_files:
self.__num_run_through += 1
if self.shuffle and self.__num_run_through >= self.get_model_config()['reshuffle_num']:
self.__reload_files = True
self.__file_index = 0
continue

# Load the current image set
file_ids = self.__file_list[self.__file_index]
pic_row = self.__image_cache.get_file_info(file_ids[0])
pic = Pic(**pic_row) if pic_row is not None else None
pic1 = Pic(**pic_row) if pic_row is not None else None
if len(file_ids) == 2:
pic_row = self.__image_cache.get_file_info(file_ids[1])
paired_pic = Pic(**pic_row) if pic_row is not None else None
else:
paired_pic = None
self.__current_pics = (pic, paired_pic)
self.__file_index += 1 # don't wrap back as __file_index == __number_of_files used as trigger above
pic2 = Pic(**pic_row) if pic_row is not None else None

# Verify the images in the selected image set actually exist on disk
# Blank out missing references and swap positions if necessary to try and get
# a valid image in the first slot.
if pic1 and not os.path.isfile(pic1.fname): pic1 = None
if pic2 and not os.path.isfile(pic2.fname): pic2 = None
if (not pic1 and pic2): pic1, pic2 = pic2, pic1

# Increment the image index for next time
self.__file_index += 1

# If pic1 is valid here, everything is OK. Break out of the loop and return the set
if pic1:
break

# Here, pic1 is undefined. That's a problem. Loop back and get another image set.
# Track the number of times we've looped back so we can abort if we don't have *any* images to display
missing_images += 1

self.__current_pics = (pic1, pic2)
return self.__current_pics

def get_number_of_files(self):
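
The rewritten get_next_file loops until it has at least one image that still exists on disk, counting misses so it can fall back to the "no files" image once every entry has failed rather than spinning forever. The per-pair check reduces to a small helper like this (a sketch; the real code does it inline and assumes Pic objects with an fname attribute, as in the diff):

import os

def keep_displayable(pic1, pic2):
    """Drop references to files that no longer exist and keep a
    displayable image in the first slot."""
    if pic1 and not os.path.isfile(pic1.fname):
        pic1 = None
    if pic2 and not os.path.isfile(pic2.fname):
        pic2 = None
    if not pic1 and pic2:
        pic1, pic2 = pic2, pic1
    return pic1, pic2   # pic1 is None only when neither file is on disk
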
1 change: 0 additions & 1 deletion picframe/viewer_display.py
@@ -1,5 +1,4 @@
import sys
sys.path.insert(1, '/home/patrick/python/pi3d')
import pi3d
#from pi3d.Texture import MAX_SIZE
import math
2 changes: 1 addition & 1 deletion setup.py
@@ -33,7 +33,7 @@
install_requires=[
'Pillow',
'ExifRead',
'pi3d>=2.46',
'pi3d>=2.47',
'PyYAML',
'paho-mqtt',
'IPTCInfo3',
