added best-image download and image caching
parent 626873a164
commit f3715f2176
@@ -2,3 +2,5 @@ import logging

 logger = logging.getLogger(__name__)
 logger.setLevel('DEBUG')
+
+config_directory = '.fm'
@@ -18,7 +18,7 @@ def get_album_chart_image(net: Network,
                           from_date: date,
                           to_date: date,
                           limit: int = 20,
-                          image_size: Image.Size = Image.Size.extralarge,
+                          image_size: Image.Size = None,
                           image_width: int = 5):
     album_chart = get_populated_album_chart(net=net, username=username,
                                             from_date=from_date, to_date=to_date,
@@ -34,7 +34,7 @@ def get_populated_album_chart(net: Network, username: str, from_date: date, to_d
     logger.info('populating scraped albums')
     albums = []
     for counter, scraped in enumerate(chart):
-        logger.debug(f'populating {counter} of {len(chart)}')
+        logger.debug(f'populating {counter+1} of {len(chart)}')
         albums.append(net.get_album(name=scraped.name, artist=scraped.artist.name))

     return albums
@@ -7,7 +7,7 @@ import logging
 logger = logging.getLogger(__name__)


-def get_blank_image(width, height):
+def get_blank_image(height, width):
     return np.zeros((height, width, 3), np.uint8)

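
The swap to get_blank_image(height, width) lines the signature up with NumPy's row-major shape convention, where the first axis is rows (height), so callers can pass dimensions in the same order the array uses. A quick self-contained check:

import numpy as np

def get_blank_image(height, width):
    # shape is (rows, cols, channels) = (height, width, 3), matching NumPy/OpenCV order
    return np.zeros((height, width, 3), np.uint8)

tile = get_blank_image(300, 500)
print(tile.shape)  # (300, 500, 3): 300 rows tall, 500 columns wide
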
@@ -35,15 +35,24 @@ def arrange_cover_grid(images: List[np.array], width: int = 5):
     return final_img


-def get_image_grid_from_objects(net: Network, objects, image_size: Image.Size, image_width: int = 5):
-    logger.debug(f'getting {image_size.name} image grid of {len(objects)} objects at width {image_width}')
+def get_image_grid_from_objects(net: Network, objects, image_size=None, final_scale=(300, 300), image_width: int = 5):
+    logger.debug(f'getting {image_size.name if image_size is not None else "best"} image grid of {len(objects)} objects at width {image_width}')
     images = []
     for counter, iter_object in enumerate(objects):
-        logger.debug(f'downloading image {counter} of {len(objects)}')
+        logger.debug(f'downloading image {counter+1} of {len(objects)}')
         try:
-            images.append(net.download_image_by_size(iter_object, size=image_size))
+            if image_size is None:
+                downloaded = net.download_best_image(iter_object, final_scale=final_scale)
+            else:
+                downloaded = net.download_image_by_size(iter_object, size=image_size)
+
+            if downloaded is not None:
+                images.append(downloaded)
+            else:
+                images.append(get_blank_image(final_scale[0], final_scale[1]))
+
         except ImageSizeNotAvailableException:
-            logger.error(f'{image_size.name} image not available for {iter_object.name}')
+            logger.error(f'{image_size.name if image_size is not None else "best"} image not available for {iter_object.name}')

     grid_image = arrange_cover_grid(images=images, width=image_width)
     return grid_image
@@ -56,8 +65,8 @@ def chunk(l, n):

 def generate_album_chart_grid(net: Network,
                               chart_range: Network.Range,
-                              image_size: Image.Size = Image.Size.extralarge,
-                              limit: int = 100,
+                              image_size: Image.Size = None,
+                              limit: int = 20,
                               image_width: int = 5):
     chart = net.get_top_albums(period=chart_range, limit=limit)
     return get_image_grid_from_objects(net=net, objects=chart, image_size=image_size, image_width=image_width)
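
As a usage sketch only: with the new defaults, a caller gets the top 20 albums rendered from the best image available per album, and can still pin a size for the old behaviour. The Network and chart module paths, the Network constructor arguments and the Range member below are assumptions for illustration; only generate_album_chart_grid, Image.Size and the image_size/limit parameters come from this diff.

import cv2

# assumed import locations, not taken from this commit
from fmframework.net.network import Network
from fmframework.model.fm import Image
from fmframework.image.chart import generate_album_chart_grid

net = Network(username='example', api_key='...')  # constructor arguments assumed

# new default: best available image per album, top 20, five covers per row
grid = generate_album_chart_grid(net=net, chart_range=Network.Range.month)  # Range member assumed

# previous behaviour: pin a size and raise the limit explicitly
grid_xl = generate_album_chart_grid(net=net,
                                    chart_range=Network.Range.month,
                                    image_size=Image.Size.extralarge,
                                    limit=100)

cv2.imwrite('chart.png', grid)
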
@@ -11,12 +11,12 @@ if TYPE_CHECKING:

 class Image:
     class Size(Enum):
+        other = 0
         small = 1
         medium = 2
         large = 3
         extralarge = 4
         mega = 5
-        other = 6

     def __init__(self, size: Size, link: str):
         self.size = size
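
The reordering of Size matters because the new download_best_image (added further down) sorts an object's images by Size.value in descending order; with other = 6 an image of unknown size would have outranked mega, whereas other = 0 pushes it to the back of the queue. A minimal standalone check of that ordering:

from enum import Enum

class Size(Enum):  # mirrors Image.Size after this change
    other = 0
    small = 1
    medium = 2
    large = 3
    extralarge = 4
    mega = 5

available = [Size.small, Size.other, Size.mega, Size.large]
best_first = sorted(available, key=lambda s: s.value, reverse=True)
print([s.name for s in best_first])  # ['mega', 'large', 'small', 'other']
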
@@ -2,6 +2,7 @@ import requests
 from typing import Optional, List, Union
 from copy import deepcopy
 import logging
+import os
 from enum import Enum
 from datetime import datetime, date, time, timedelta

@@ -12,6 +13,7 @@ from fmframework.model.fm import Scrobble, Wiki, Image, WeeklyChart
 from fmframework.model.track import Track
 from fmframework.model.album import Album
 from fmframework.model.artist import Artist
+from fmframework import config_directory

 logger = logging.getLogger(__name__)

@@ -279,14 +281,49 @@ class Network:
         except AttributeError:
             logger.error(f'{fm_object} has no images')

+    def download_best_image(self, fm_object: Union[Track, Album, Artist], final_scale=None):
+        try:
+            images = sorted(fm_object.images, key=lambda x: x.size.value, reverse=True)
+
+            for image in images:
+
+                downloaded = self.download_image(image_pointer=image)
+                if downloaded is not None:
+
+                    if final_scale is not None:
+                        if downloaded.shape != final_scale:
+                            downloaded = cv2.resize(downloaded, final_scale)
+
+                    return downloaded
+                else:
+                    logger.error('null image returned, iterating')
+        except AttributeError:
+            logger.error(f'{fm_object} has no images')
+
     @staticmethod
-    def download_image(image_pointer: Image):
+    def download_image(image_pointer: Image, cache=True):
         logger.info(f'downloading {image_pointer.size.name} image - {image_pointer.link}')
+        if image_pointer.link is None or len(image_pointer.link) == 0 or image_pointer.link == '':
+            logger.error('invalid image url')
+            return None
+
+        url_split = image_pointer.link.split('/')
+        cache_path = os.path.join(config_directory, 'cache')
+        file_path = os.path.join(cache_path, url_split[-2]+url_split[-1])
+
+        if os.path.exists(file_path):
+            return cv2.imread(file_path)
+
         resp = requests.get(image_pointer.link, stream=True)

         if 200 <= resp.status_code < 300:
             image = np.asarray(bytearray(resp.content), dtype="uint8")
             image = cv2.imdecode(image, cv2.IMREAD_COLOR)
+            if cache:
+                if not os.path.exists(cache_path):
+                    os.makedirs(cache_path)
+                if not cv2.imwrite(filename=file_path, img=image):
+                    logger.error('failed to dump to cache')
             return image
         else:
             logger.error(f'http error {resp.status_code}')