"""Build album-art collages from last.fm charts: download a cover image for
each charted album and tile the covers into a single grid image."""

import numpy as np

from typing import List
from datetime import date

from fmframework.net.network import Network
from fmframework.net.scrape import UserScraper
from fmframework.image.downloader import Downloader, ImageSizeNotAvailableException
from fmframework.model import Image

import logging

logger = logging.getLogger(__name__)


def get_blank_image(height, width):
    """Return a black placeholder tile of the given dimensions."""
    return np.zeros((height, width, 3), np.uint8)


def arrange_cover_grid(images: List[np.ndarray], width: int = 5):
    """Concatenate images into a grid of the given width, row by row."""
    logger.debug(f'arranging {len(images)} images at width {width}')

    rows = []
    for row in chunk(images, width):
        # stitch this row together left to right
        row_img = row[0]
        for image in row[1:]:
            row_img = np.concatenate((row_img, image), axis=1)

        # handle an incomplete final row by padding it with a blank image
        # to match the width of the first row
        if len(row) < width and len(rows) > 0:
            pad_width = rows[0].shape[1] - row_img.shape[1]
            pad_height = rows[0].shape[0]
            logger.debug(rows[0].shape)
            row_img = np.concatenate((row_img, get_blank_image(width=pad_width, height=pad_height)), axis=1)

        rows.append(row_img)

    # stack the completed rows top to bottom
    final_img = rows[0]
    if len(rows) > 1:
        for row in rows[1:]:
            final_img = np.concatenate((final_img, row), axis=0)
    return final_img
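

# A quick sketch of the padding behaviour above, using only this module:
# five 300x300 tiles at width 3 leave the final row one tile short, so it
# is padded with a blank image to match the first row.
#
#   tiles = [get_blank_image(300, 300) for _ in range(5)]
#   grid = arrange_cover_grid(tiles, width=3)  # grid.shape == (600, 900, 3)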


def get_image_grid_from_objects(objects,
                                image_size=None,
                                final_scale=(300, 300),
                                image_width: int = 5,
                                overlay_count: bool = False,
                                loader=None,
                                check_cache=True,
                                cache=True):
    """Download an image for each object and arrange them into a grid.

    Objects that yield no image are replaced with a blank tile; objects
    whose requested size is unavailable are logged and skipped.
    """
    logger.debug(f'getting {image_size.name if image_size is not None else "best"} image grid '
                 f'of {len(objects)} objects at width {image_width}')

    if loader is None:
        loader = Downloader()

    images = []
    for counter, iter_object in enumerate(objects):
        logger.debug(f'downloading image {counter + 1} of {len(objects)}')
        try:
            if image_size is None:
                downloaded = loader.best_image(iter_object,
                                               final_scale=final_scale,
                                               check_cache=check_cache,
                                               cache=cache)
            else:
                downloaded = loader.image_by_size(iter_object,
                                                  size=image_size,
                                                  check_cache=check_cache,
                                                  cache=cache)

            if downloaded is not None:
                if overlay_count:
                    loader.add_scrobble_count_to_image(downloaded, iter_object.user_scrobbles)
                images.append(downloaded)
            else:
                images.append(get_blank_image(final_scale[0], final_scale[1]))

        except ImageSizeNotAvailableException:
            logger.error(f'{image_size.name if image_size is not None else "best"} '
                         f'image not available for {iter_object.name}')

    grid_image = arrange_cover_grid(images=images, width=image_width)
    return grid_image


def chunk(seq, n):
    """Yield successive n-sized pieces of the sequence seq."""
    for i in range(0, len(seq), n):
        yield seq[i:i + n]
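

# For reference, chunk keeps order and yields a short final piece when the
# sequence doesn't divide evenly:
#
#   list(chunk([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]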


class AlbumChartCollage:
    """Static helpers for building album-chart collages from last.fm data."""

    @staticmethod
    def from_relative_range(net: Network,
                            chart_range: Network.Range,
                            username: str = None,
                            limit: int = 20,
                            overlay_count: bool = False,
                            image_size: Image.Size = None,
                            image_width: int = 5,
                            check_cache=True,
                            cache=True):
        """Build a collage from a top-albums chart over a relative time period."""
        chart = net.top_albums(username=username,
                               period=chart_range,
                               limit=limit)
        return get_image_grid_from_objects(objects=chart,
                                           image_size=image_size,
                                           image_width=image_width,
                                           overlay_count=overlay_count,
                                           check_cache=check_cache,
                                           cache=cache)

    @staticmethod
    def from_dates(net: Network,
                   from_date: date,
                   to_date: date,
                   username: str = None,
                   limit: int = 20,
                   overlay_count: bool = False,
                   image_size: Image.Size = None,
                   image_width: int = 5,
                   check_cache=True,
                   cache=True):
        """Build a collage for an arbitrary date window by scraping the user's chart."""
        chart = UserScraper.album_chart(net=net,
                                        username=username,
                                        from_date=from_date,
                                        to_date=to_date,
                                        limit=limit)
        return get_image_grid_from_objects(objects=chart,
                                           image_size=image_size,
                                           image_width=image_width,
                                           overlay_count=overlay_count,
                                           check_cache=check_cache,
                                           cache=cache)
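

if __name__ == '__main__':
    # Smoke test using only this module: tile nine blank covers into a
    # 3-wide grid and report the composite shape.
    demo = arrange_cover_grid([get_blank_image(300, 300) for _ in range(9)], width=3)
    print(demo.shape)  # (900, 900, 3)

    # The real flow needs a configured last.fm client; the constructor
    # arguments and Range member below are assumptions for illustration,
    # not the confirmed Network API:
    #
    #   net = Network(...)  # configured with a last.fm username/API key
    #   collage = AlbumChartCollage.from_relative_range(net=net,
    #                                                   chart_range=Network.Range.MONTH,
    #                                                   limit=9,
    #                                                   image_width=3)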