concise method names

aj 2020-08-12 09:25:19 +01:00
parent cc67a08402
commit 8ad8509f25
6 changed files with 181 additions and 180 deletions
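The commit is a rename pass over the public API: the redundant get_ and download_ prefixes are dropped from the network, downloader and scraper methods, and the duplicate checker switches from a bare space replacement to urllib.parse.quote_plus when building last.fm URLs. A minimal sketch of the new call shapes, using only names and signatures visible in the diff below (assumes fmframework is installed and a last.fm API key is held in the FMKEY environment variable):

    import os
    from fmframework.net.network import Network

    # construct the client as the backup script does
    net = Network(username='sarsoo', api_key=os.environ['FMKEY'])

    # renamed read methods keep their signatures, minus the prefix
    scrobbles = net.recent_tracks(limit=50, page_limit=200)  # was get_recent_tracks
    count = net.user_scrobble_count()                        # was get_user_scrobble_count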

View File

@@ -20,7 +20,7 @@ def backup_scrobbles(file_path):
     net = Network(username='sarsoo', api_key=os.environ['FMKEY'])
     try:
-        scrobbles = net.get_recent_tracks()
+        scrobbles = net.recent_tracks()
         if not os.path.exists(file_path):
             os.makedirs(file_path)

View File

@@ -1,5 +1,6 @@
 from fmframework.net.network import Network, LastFMNetworkException
+from urllib import parse
 from csv import DictWriter
 import os
 import logging
@@ -42,7 +43,7 @@ def check_for_duplicates(fmkey, retrieval_limit):
     net.retry_counter = 20
     try:
-        scrobbles = net.get_recent_tracks(limit=retrieval_limit, page_limit=200)
+        scrobbles = net.recent_tracks(limit=retrieval_limit, page_limit=200)
         if not scrobbles:
             logger.error('No scrobbles returned')
@@ -60,8 +61,8 @@ def check_for_duplicates(fmkey, retrieval_limit):
     for duplicate in duplicates_found:
         print(f'{duplicate[1].time} - {duplicate[0].time}, {duplicate[0].track}')
         print(f'https://www.last.fm/user/{username}/library/music/'
-              f'{duplicate[0].track.artist.name.replace(" ", "+")}/_/'
-              f'{duplicate[0].track.name.replace(" ", "+")}')
+              f'{parse.quote_plus(duplicate[0].track.artist.name)}/_/'
+              f'{parse.quote_plus(duplicate[0].track.name)}')
         print(f'https://www.last.fm/user/{username}/library'
               f'?from={duplicate[0].time.strftime("%Y-%m-%d")}'
               f'&to={duplicate[1].time.strftime("%Y-%m-%d")}')
@@ -84,8 +85,8 @@ def check_for_duplicates(fmkey, retrieval_limit):
             'album': duplicate[0].track.album.name,
             'artist': duplicate[0].track.artist.name,
             'track url': f'https://www.last.fm/user/{username}/library/music/'
-                         f'{duplicate[0].track.artist.name.replace(" ", "+")}/_/'
-                         f'{duplicate[0].track.name.replace(" ", "+")}',
+                         f'{parse.quote_plus(duplicate[0].track.artist.name)}/_/'
+                         f'{parse.quote_plus(duplicate[0].track.name)}',
             'scrobbles url': f'https://www.last.fm/user/{username}/library'
                              f'?from={duplicate[1].time.strftime("%Y-%m-%d")}'
                              f'&to={duplicate[0].time.strftime("%Y-%m-%d")}'
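The move to parse.quote_plus is more than a rename: str.replace(' ', '+') only handles spaces, leaving reserved characters raw in the URL path. A standard-library-only illustration (artist names are arbitrary examples, not from the diff):

    from urllib import parse

    # old approach: only spaces are encoded; '/' and '&' pass through and can break the path
    print('AC/DC'.replace(' ', '+'))                      # AC/DC
    print('Nick Cave & The Bad Seeds'.replace(' ', '+'))  # Nick+Cave+&+The+Bad+Seeds

    # quote_plus encodes spaces as '+' and percent-encodes reserved characters
    print(parse.quote_plus('AC/DC'))                      # AC%2FDC
    print(parse.quote_plus('Nick Cave & The Bad Seeds'))  # Nick+Cave+%26+The+Bad+Seeds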

View File

@@ -58,12 +58,12 @@ def get_image_grid_from_objects(objects,
         logger.debug(f'downloading image {counter+1} of {len(objects)}')
         try:
             if image_size is None:
-                downloaded = loader.download_best_image(iter_object,
+                downloaded = loader.best_image(iter_object,
                                                final_scale=final_scale,
                                                check_cache=check_cache,
                                                cache=cache)
             else:
-                downloaded = loader.download_image_by_size(iter_object,
+                downloaded = loader.image_by_size(iter_object,
                                                   size=image_size,
                                                   check_cache=check_cache,
                                                   cache=cache)
@@ -100,7 +100,7 @@ class AlbumChartCollage:
                          image_width: int = 5,
                          check_cache=True,
                          cache=True):
-        chart = net.get_top_albums(username=username,
+        chart = net.top_albums(username=username,
                                period=chart_range,
                                limit=limit)
         return get_image_grid_from_objects(objects=chart,
@@ -121,7 +121,7 @@ class AlbumChartCollage:
                          image_width: int = 5,
                          check_cache=True,
                          cache=True):
-        chart = UserScraper.get_album_chart(net=net,
+        chart = UserScraper.album_chart(net=net,
                                         username=username,
                                         from_date=from_date,
                                         to_date=to_date,

View File

@@ -21,7 +21,7 @@ class Downloader:
         self.rsession = requests.Session()
         self.cache_path = os.path.join(config_directory, 'cache')

-    def download_image_by_size(self,
+    def image_by_size(self,
                        fm_object: Union[Track, Album, Artist],
                        size: Image.Size,
                        check_cache=True,
@@ -31,7 +31,7 @@ class Downloader:
             image_pointer = next((i for i in images if i.size == size), None)
             if image_pointer is not None:
-                return self.download_image(image_pointer=image_pointer, check_cache=check_cache, cache=cache)
+                return self.download(image_pointer=image_pointer, check_cache=check_cache, cache=cache)
             else:
                 logger.error(f'image of size {size.name} not found')
                 raise ImageSizeNotAvailableException
@@ -39,7 +39,7 @@ class Downloader:
         except AttributeError:
             logger.error(f'{fm_object} has no images')

-    def download_best_image(self,
+    def best_image(self,
                     fm_object: Union[Track, Album, Artist],
                     final_scale=None,
                     check_cache=True,
@@ -49,7 +49,7 @@ class Downloader:
         for image in images:
-            downloaded = self.download_image(image_pointer=image, check_cache=check_cache, cache=cache)
+            downloaded = self.download(image_pointer=image, check_cache=check_cache, cache=cache)

             if downloaded is not None:
                 if final_scale is not None:
@@ -87,7 +87,7 @@ class Downloader:
                         (255, 255, 255),
                         2)

-    def download_image(self, image_pointer: Image, check_cache=True, cache=True):
+    def download(self, image_pointer: Image, check_cache=True, cache=True):
         """Perform network action to download Image object"""
         logger.info(f'downloading {image_pointer.size.name} image - {image_pointer.link}')
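After the rename the downloader's surface reads image_by_size, best_image and download. A sketch of the size-first-with-fallback pattern mirroring the branching in get_image_grid_from_objects above; the loader, album and size bindings are assumptions, only the method and exception names come from the diff:

    # `loader` is a Downloader, `album` a Track/Album/Artist, `size` an Image.Size member
    try:
        img = loader.image_by_size(album, size=size)   # was download_image_by_size
    except ImageSizeNotAvailableException:             # raised when no image matches `size`
        img = loader.best_image(album)                 # was download_best_image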

View File

@@ -101,7 +101,7 @@ class Network:
         return self.net_call(http_method='GET', method=method, params=data)

-    def get_user_scrobble_count(self, username: str = None) -> int:
+    def user_scrobble_count(self, username: str = None) -> int:
         if username is None:
             username = self.username
         logger.info(f'getting scrobble count {username}')
@@ -111,7 +111,7 @@ class Network:
                     .get('playcount', None)
                     )

-    def get_recent_tracks(self,
+    def recent_tracks(self,
                           username: str = None,
                           limit: int = None,
                           from_time: datetime = None,
@@ -139,7 +139,7 @@ class Network:
         return [self.parse_scrobble(i) for i in items[:limit] if i.get('date')]

-    def get_scrobbles_from_date(self,
+    def scrobbles_from_date(self,
                                 input_date: date,
                                 username: str = None,
                                 limit: int = None) -> Optional[List[Scrobble]]:
@@ -149,22 +149,22 @@ class Network:
         from_date = datetime.combine(date=input_date, time=midnight)
         to_date = datetime.combine(date=input_date + timedelta(days=1), time=midnight)

-        return self.get_recent_tracks(username=username, from_time=from_date, to_time=to_date, limit=limit)
+        return self.recent_tracks(username=username, from_time=from_date, to_time=to_date, limit=limit)

-    def get_scrobble_count_from_date(self,
+    def count_scrobbles_from_date(self,
                                      input_date: date,
                                      username: str = None,
                                      limit: int = None) -> int:
         logger.info(f'getting {input_date} scrobble count for {username or self.username}')

-        scrobbles = self.get_scrobbles_from_date(input_date=input_date, username=username, limit=limit)
+        scrobbles = self.scrobbles_from_date(input_date=input_date, username=username, limit=limit)

         if scrobbles:
             return len(scrobbles)
         else:
             return 0

-    def get_track(self,
+    def track(self,
               name: str,
               artist: str,
               username: str = None) -> Optional[Track]:
@@ -180,7 +180,7 @@ class Network:
         else:
             logging.error(f'abnormal response - {resp}')

-    def get_album(self,
+    def album(self,
               name: str,
               artist: str,
               username: str = None) -> Optional[Album]:
@@ -196,7 +196,7 @@ class Network:
         else:
             logging.error(f'abnormal response - {resp}')

-    def get_artist(self,
+    def artist(self,
                name: str,
                username: str = None) -> Optional[Artist]:
         logger.info(f'getting {name} for {username or self.username}')
@@ -210,7 +210,7 @@ class Network:
         else:
             logging.error(f'abnormal response - {resp}')

-    def get_top_tracks(self,
+    def top_tracks(self,
                    period: Range,
                    username: str = None,
                    limit: int = None):
@@ -229,7 +229,7 @@ class Network:
         return [self.parse_track(i) for i in iterator.items]

-    def get_top_albums(self,
+    def top_albums(self,
                    period: Range,
                    username: str = None,
                    limit: int = None):
@@ -248,7 +248,7 @@ class Network:
         return [self.parse_chart_album(i) for i in iterator.items]

-    def get_top_artists(self,
+    def top_artists(self,
                     period: Range,
                     username: str = None,
                     limit: int = None):
@@ -267,7 +267,7 @@ class Network:
         return [self.parse_artist(i) for i in iterator.items]

-    def get_weekly_charts(self, username: str = None):
+    def weekly_charts(self, username: str = None):
         logger.info('getting weekly chart list')

         resp = self.get_request('user.getweeklychartlist', user=username or self.username)
@@ -277,7 +277,7 @@ class Network:
         else:
             logger.error('no response')

-    def get_weekly_chart(self,
+    def weekly_chart(self,
                          object_type,
                          chart: WeeklyChart = None,
                          from_time: int = None,
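Taken together, the Network read methods now name what they return. Continuing the sketch from the top of the page (net as constructed there; argument values are illustrative, signatures are from the diff):

    from datetime import date

    track = net.track(name='Some Track', artist='Some Artist')      # was get_track
    album = net.album(name='Some Album', artist='Some Artist')      # was get_album
    artist = net.artist(name='Some Artist')                         # was get_artist
    charts = net.weekly_charts()                                    # was get_weekly_charts
    today = net.scrobbles_from_date(input_date=date.today())        # was get_scrobbles_from_date
    count = net.count_scrobbles_from_date(input_date=date.today())  # was get_scrobble_count_from_date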

View File

@@ -34,12 +34,12 @@ class LibraryScraper:
             raise TypeError(f'invalid period provided, {period} / {type(period)}')

     @staticmethod
-    def get_scrobbled_tracks(username: str, artist: str, net: Network = None, whole_track=True,
+    def artist_tracks(username: str, artist: str, net: Network = None, whole_track=True,
                       from_date: datetime = None, to_date: datetime = None,
                       date_preset: str = None):
         logger.info(f"loading {artist}'s tracks for {username}")

-        tracks = LibraryScraper.get_scraped_scrobbled_tracks(username=username, artist=artist,
+        tracks = LibraryScraper.scraped_artist_tracks(username=username, artist=artist,
                                                       from_date=from_date, to_date=to_date,
                                                       date_preset=date_preset)
@@ -50,7 +50,7 @@ class LibraryScraper:
         if tracks is not None:
             if whole_track:
                 for track in tracks:
-                    populated_tracks.append(net.get_track(name=track.name, artist=track.artist.name, username=username))
+                    populated_tracks.append(net.track(name=track.name, artist=track.artist.name, username=username))

                 return populated_tracks
             else:
@@ -59,12 +59,12 @@ class LibraryScraper:
             logger.error(f'no scraped tracks returned for {artist} / {username}')

     @staticmethod
-    def get_scraped_scrobbled_tracks(username: str, artist: str,
+    def scraped_artist_tracks(username: str, artist: str,
                               from_date: datetime = None, to_date: datetime = None,
                               date_preset: str = None):
         logger.info(f'loading page scraped {artist} tracks for {username}')

-        page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1,
+        page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1,
                                                       url_key='tracks', include_pages=True,
                                                       from_date=from_date, to_date=to_date,
                                                       date_preset=date_preset)
@@ -73,7 +73,7 @@ class LibraryScraper:
         tracks = page1[0]
         for page_number in range(page1[1] - 1):
-            page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist,
+            page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist,
                                                          url_key='tracks',
                                                          page=page_number + 2,
                                                          from_date=from_date, to_date=to_date,
@@ -99,12 +99,12 @@ class LibraryScraper:
             logger.error(f'no tracks returned for page 1 of {artist} / {username}')

     @staticmethod
-    def get_scrobbled_albums(username: str, artist: str, net: Network = None, whole_album=True,
+    def artists_albums(username: str, artist: str, net: Network = None, whole_album=True,
                       from_date: datetime = None, to_date: datetime = None,
                       date_preset: str = None):
         logger.info(f"loading {artist}'s albums for {username}")

-        albums = LibraryScraper.get_scraped_scrobbled_albums(username=username, artist=artist,
+        albums = LibraryScraper.scraped_artists_albums(username=username, artist=artist,
                                                        from_date=from_date, to_date=to_date,
                                                        date_preset=date_preset)
@@ -115,7 +115,7 @@ class LibraryScraper:
         if albums is not None:
             if whole_album:
                 for album in albums:
-                    populated_albums.append(net.get_album(name=album.name, artist=album.artist.name, username=username))
+                    populated_albums.append(net.album(name=album.name, artist=album.artist.name, username=username))

                 return populated_albums
             else:
@@ -124,12 +124,12 @@ class LibraryScraper:
             logger.error(f'no scraped albums returned for {artist} / {username}')

     @staticmethod
-    def get_scraped_scrobbled_albums(username: str, artist: str,
+    def scraped_artists_albums(username: str, artist: str,
                               from_date: datetime = None, to_date: datetime = None,
                               date_preset: str = None):
         logger.info(f'loading page scraped {artist} albums for {username}')

-        page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1,
+        page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1,
                                                       url_key='albums',
                                                       include_pages=True,
                                                       from_date=from_date, to_date=to_date,
@@ -139,7 +139,7 @@ class LibraryScraper:
         albums = page1[0]
         for page_number in range(page1[1] - 1):
-            page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist,
+            page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist,
                                                          url_key='albums',
                                                          page=page_number + 2,
                                                          from_date=from_date, to_date=to_date,
@@ -165,12 +165,12 @@ class LibraryScraper:
             logger.error(f'no albums returned for page 1 of {artist} / {username}')

     @staticmethod
-    def get_albums_tracks(username: str, artist: str, album: str, net: Network = None, whole_track=True,
+    def album_tracks(username: str, artist: str, album: str, net: Network = None, whole_track=True,
                    from_date: datetime = None, to_date: datetime = None,
                    date_preset: str = None):
         logger.info(f"loading {artist}'s {album} tracks for {username}")

-        tracks = LibraryScraper.get_scraped_albums_tracks(username=username, artist=artist, album=album,
+        tracks = LibraryScraper.scraped_album_tracks(username=username, artist=artist, album=album,
                                                      from_date=from_date, to_date=to_date,
                                                      date_preset=date_preset)
@@ -181,7 +181,7 @@ class LibraryScraper:
         if tracks is not None:
             if whole_track:
                 for track in tracks:
-                    populated_tracks.append(net.get_track(name=track.name, artist=track.artist.name, username=username))
+                    populated_tracks.append(net.track(name=track.name, artist=track.artist.name, username=username))

                 return populated_tracks
             else:
@@ -190,12 +190,12 @@ class LibraryScraper:
             logger.error(f'no scraped tracks returned for {album} / {artist} / {username}')

     @staticmethod
-    def get_scraped_albums_tracks(username: str, artist: str, album: str,
+    def scraped_album_tracks(username: str, artist: str, album: str,
                           from_date: datetime = None, to_date: datetime = None,
                           date_preset: str = None):
         logger.info(f'loading page scraped {artist} albums for {username}')

-        page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1,
+        page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1,
                                                       album=album,
                                                       include_pages=True,
                                                       from_date=from_date, to_date=to_date,
@@ -205,7 +205,7 @@ class LibraryScraper:
         albums = page1[0]
         for page_number in range(page1[1] - 1):
-            page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist,
+            page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist,
                                                          album=album,
                                                          page=page_number + 2,
                                                          from_date=from_date, to_date=to_date,
@@ -233,12 +233,12 @@ class LibraryScraper:
             logger.error(f'no tracks returned for page 1 of {album} / {artist} / {username}')

     @staticmethod
-    def get_track_scrobbles(username: str, artist: str, track: str, net: Network = None, whole_track=True,
+    def track_scrobbles(username: str, artist: str, track: str, net: Network = None, whole_track=True,
                      from_date: datetime = None, to_date: datetime = None,
                      date_preset: str = None):
         logger.info(f"loading {track} / {artist} for {username}")

-        tracks = LibraryScraper.get_scraped_track_scrobbles(username=username, artist=artist, track=track,
+        tracks = LibraryScraper.scraped_track_scrobbles(username=username, artist=artist, track=track,
                                                      from_date=from_date, to_date=to_date,
                                                      date_preset=date_preset)
@@ -249,10 +249,10 @@ class LibraryScraper:
         if tracks is not None:
             if whole_track:
                 for track in tracks:
-                    pulled_track = net.get_track(name=track.track.name,
+                    pulled_track = net.track(name=track.track.name,
                                              artist=track.track.artist.name,
                                              username=username)
-                    pulled_track.album = net.get_album(name=track.track.album.name,
+                    pulled_track.album = net.album(name=track.track.album.name,
                                                    artist=track.track.album.name,
                                                    username=username)
                     populated_tracks.append(pulled_track)
@@ -264,12 +264,12 @@ class LibraryScraper:
             logger.error(f'no scraped tracks returned for {track} / {artist} / {username}')

     @staticmethod
-    def get_scraped_track_scrobbles(username: str, artist: str, track: str,
+    def scraped_track_scrobbles(username: str, artist: str, track: str,
                             from_date: datetime = None, to_date: datetime = None,
                             date_preset: str = None):
         logger.info(f'loading page scraped {track} / {artist} for {username}')

-        page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1,
+        page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1,
                                                       track=track,
                                                       include_pages=True,
                                                       from_date=from_date, to_date=to_date,
@@ -279,7 +279,7 @@ class LibraryScraper:
         albums = page1[0]
         for page_number in range(page1[1] - 1):
-            page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist,
+            page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist,
                                                          track=track,
                                                          page=page_number + 2,
                                                          from_date=from_date, to_date=to_date,
@@ -327,7 +327,7 @@ class LibraryScraper:
             logger.error(f'no scrobbles returned for page 1 of {track} / {artist} / {username}')

     @staticmethod
-    def get_scraped_artist_subpage(username: str, artist: str, page: int,
+    def scraped_artist_subpage(username: str, artist: str, page: int,
                            url_key: str = None,
                            album: str = None,
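The LibraryScraper renames keep one convention: the scraped_ prefix marks the raw page-scraping variants, while the unprefixed methods populate full objects through the network. A sketch against the signatures above (artist value illustrative, net as constructed earlier):

    # populated variant: scrape the listing, then pull full Track objects via the API
    tracks = LibraryScraper.artist_tracks(username='sarsoo', artist='Some Artist',
                                          net=net, whole_track=True)  # was get_scrobbled_tracks

    # raw variant: page-scraped 'light' objects only, no network population
    light = LibraryScraper.scraped_artist_tracks(username='sarsoo', artist='Some Artist')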
@@ -391,23 +391,23 @@ class UserScraper:
     rsession = Session()

     @staticmethod
-    def get_album_chart(net: Network, username: str, from_date: date, to_date: date, limit: int):
+    def album_chart(net: Network, username: str, from_date: date, to_date: date, limit: int):
         """Scrape chart from last.fm frontend before pulling each from the backend for a complete object"""
-        chart = UserScraper.get_scraped_album_chart(username or net.username, from_date, to_date, limit)
+        chart = UserScraper.scraped_album_chart(username or net.username, from_date, to_date, limit)
         logger.info('populating scraped albums')

         albums = []
         for counter, scraped in enumerate(chart):
             logger.debug(f'populating {counter+1} of {len(chart)}')
             try:
-                albums.append(net.get_album(name=scraped.name, artist=scraped.artist.name))
+                albums.append(net.album(name=scraped.name, artist=scraped.artist.name))
             except LastFMNetworkException:
                 logger.exception(f'error occurred during album retrieval')

         return albums

     @staticmethod
-    def get_scraped_album_chart(username: str, from_date: date, to_date: date, limit: int):
+    def scraped_album_chart(username: str, from_date: date, to_date: date, limit: int):
         """Scrape 'light' objects from last.fm frontend based on date range and limit"""
         logger.info(f'scraping album chart from {from_date} to {to_date} for {username}')
@@ -418,14 +418,14 @@ class UserScraper:
         albums = []
         for i in range(pages):
-            scraped_albums = UserScraper.get_scraped_album_chart_page(username, from_date, to_date, i + 1)
+            scraped_albums = UserScraper.scraped_album_chart_page(username, from_date, to_date, i + 1)
             if scraped_albums is not None:
                 albums += scraped_albums
         return albums[:limit]

     @staticmethod
-    def get_scraped_album_chart_page(username: str, from_date: date, to_date: date, page: int):
+    def scraped_album_chart_page(username: str, from_date: date, to_date: date, page: int):
         """Scrape 'light' objects single page of last.fm frontend based on date range"""
         logger.debug(f'loading page {page} from {from_date} to {to_date} for {username}')
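And the date-bound chart scraper used by AlbumChartCollage above, under the same convention (dates illustrative, net as before):

    from datetime import date

    # scrape the frontend chart, then populate each album through the API
    chart = UserScraper.album_chart(net=net, username='sarsoo',
                                    from_date=date(2020, 1, 1),
                                    to_date=date(2020, 6, 30),
                                    limit=20)  # was get_album_chart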