From 8ad8509f250d2a26f9eb4aadf4b8eb5cc4abca6d Mon Sep 17 00:00:00 2001 From: aj Date: Wed, 12 Aug 2020 09:25:19 +0100 Subject: [PATCH] concise method names --- backup.py | 2 +- duplicate.py | 11 +- fmframework/image/__init__.py | 32 +++--- fmframework/image/downloader.py | 26 ++--- fmframework/net/network.py | 96 ++++++++-------- fmframework/net/scrape.py | 194 ++++++++++++++++---------------- 6 files changed, 181 insertions(+), 180 deletions(-) diff --git a/backup.py b/backup.py index 5a07c41..910117e 100644 --- a/backup.py +++ b/backup.py @@ -20,7 +20,7 @@ def backup_scrobbles(file_path): net = Network(username='sarsoo', api_key=os.environ['FMKEY']) try: - scrobbles = net.get_recent_tracks() + scrobbles = net.recent_tracks() if not os.path.exists(file_path): os.makedirs(file_path) diff --git a/duplicate.py b/duplicate.py index ebc3309..72205ef 100644 --- a/duplicate.py +++ b/duplicate.py @@ -1,5 +1,6 @@ from fmframework.net.network import Network, LastFMNetworkException +from urllib import parse from csv import DictWriter import os import logging @@ -42,7 +43,7 @@ def check_for_duplicates(fmkey, retrieval_limit): net.retry_counter = 20 try: - scrobbles = net.get_recent_tracks(limit=retrieval_limit, page_limit=200) + scrobbles = net.recent_tracks(limit=retrieval_limit, page_limit=200) if not scrobbles: logger.error('No scrobbles returned') @@ -60,8 +61,8 @@ def check_for_duplicates(fmkey, retrieval_limit): for duplicate in duplicates_found: print(f'{duplicate[1].time} - {duplicate[0].time}, {duplicate[0].track}') print(f'https://www.last.fm/user/{username}/library/music/' - f'{duplicate[0].track.artist.name.replace(" ", "+")}/_/' - f'{duplicate[0].track.name.replace(" ", "+")}') + f'{parse.quote_plus(duplicate[0].track.artist.name)}/_/' + f'{parse.quote_plus(duplicate[0].track.name)}') print(f'https://www.last.fm/user/{username}/library' f'?from={duplicate[0].time.strftime("%Y-%m-%d")}' f'&to={duplicate[1].time.strftime("%Y-%m-%d")}') @@ -84,8 +85,8 @@ def check_for_duplicates(fmkey, retrieval_limit): 'album': duplicate[0].track.album.name, 'artist': duplicate[0].track.artist.name, 'track url': f'https://www.last.fm/user/{username}/library/music/' - f'{duplicate[0].track.artist.name.replace(" ", "+")}/_/' - f'{duplicate[0].track.name.replace(" ", "+")}', + f'{parse.quote_plus(duplicate[0].track.artist.name)}/_/' + f'{parse.quote_plus(duplicate[0].track.name)}', 'scrobbles url': f'https://www.last.fm/user/{username}/library' f'?from={duplicate[1].time.strftime("%Y-%m-%d")}' f'&to={duplicate[0].time.strftime("%Y-%m-%d")}' diff --git a/fmframework/image/__init__.py b/fmframework/image/__init__.py index ee0b96c..35648cb 100644 --- a/fmframework/image/__init__.py +++ b/fmframework/image/__init__.py @@ -58,15 +58,15 @@ def get_image_grid_from_objects(objects, logger.debug(f'downloading image {counter+1} of {len(objects)}') try: if image_size is None: - downloaded = loader.download_best_image(iter_object, - final_scale=final_scale, - check_cache=check_cache, - cache=cache) + downloaded = loader.best_image(iter_object, + final_scale=final_scale, + check_cache=check_cache, + cache=cache) else: - downloaded = loader.download_image_by_size(iter_object, - size=image_size, - check_cache=check_cache, - cache=cache) + downloaded = loader.image_by_size(iter_object, + size=image_size, + check_cache=check_cache, + cache=cache) if downloaded is not None: if overlay_count: @@ -100,9 +100,9 @@ class AlbumChartCollage: image_width: int = 5, check_cache=True, cache=True): - chart = 
net.get_top_albums(username=username, - period=chart_range, - limit=limit) + chart = net.top_albums(username=username, + period=chart_range, + limit=limit) return get_image_grid_from_objects(objects=chart, image_size=image_size, image_width=image_width, @@ -121,11 +121,11 @@ class AlbumChartCollage: image_width: int = 5, check_cache=True, cache=True): - chart = UserScraper.get_album_chart(net=net, - username=username, - from_date=from_date, - to_date=to_date, - limit=limit) + chart = UserScraper.album_chart(net=net, + username=username, + from_date=from_date, + to_date=to_date, + limit=limit) return get_image_grid_from_objects(objects=chart, image_size=image_size, image_width=image_width, diff --git a/fmframework/image/downloader.py b/fmframework/image/downloader.py index fbcf2db..a97c853 100644 --- a/fmframework/image/downloader.py +++ b/fmframework/image/downloader.py @@ -21,17 +21,17 @@ class Downloader: self.rsession = requests.Session() self.cache_path = os.path.join(config_directory, 'cache') - def download_image_by_size(self, - fm_object: Union[Track, Album, Artist], - size: Image.Size, - check_cache=True, - cache=True): + def image_by_size(self, + fm_object: Union[Track, Album, Artist], + size: Image.Size, + check_cache=True, + cache=True): try: images = fm_object.images image_pointer = next((i for i in images if i.size == size), None) if image_pointer is not None: - return self.download_image(image_pointer=image_pointer, check_cache=check_cache, cache=cache) + return self.download(image_pointer=image_pointer, check_cache=check_cache, cache=cache) else: logger.error(f'image of size {size.name} not found') raise ImageSizeNotAvailableException @@ -39,17 +39,17 @@ class Downloader: except AttributeError: logger.error(f'{fm_object} has no images') - def download_best_image(self, - fm_object: Union[Track, Album, Artist], - final_scale=None, - check_cache=True, - cache=True): + def best_image(self, + fm_object: Union[Track, Album, Artist], + final_scale=None, + check_cache=True, + cache=True): try: images = sorted(fm_object.images, key=lambda x: x.size.value, reverse=True) for image in images: - downloaded = self.download_image(image_pointer=image, check_cache=check_cache, cache=cache) + downloaded = self.download(image_pointer=image, check_cache=check_cache, cache=cache) if downloaded is not None: if final_scale is not None: @@ -87,7 +87,7 @@ class Downloader: (255, 255, 255), 2) - def download_image(self, image_pointer: Image, check_cache=True, cache=True): + def download(self, image_pointer: Image, check_cache=True, cache=True): """Perform network action to download Image object""" logger.info(f'downloading {image_pointer.size.name} image - {image_pointer.link}') diff --git a/fmframework/net/network.py b/fmframework/net/network.py index 62bc0db..ff38ee1 100644 --- a/fmframework/net/network.py +++ b/fmframework/net/network.py @@ -101,7 +101,7 @@ class Network: return self.net_call(http_method='GET', method=method, params=data) - def get_user_scrobble_count(self, username: str = None) -> int: + def user_scrobble_count(self, username: str = None) -> int: if username is None: username = self.username logger.info(f'getting scrobble count {username}') @@ -111,12 +111,12 @@ class Network: .get('playcount', None) ) - def get_recent_tracks(self, - username: str = None, - limit: int = None, - from_time: datetime = None, - to_time: datetime = None, - page_limit: int = 50) -> Optional[List[Scrobble]]: + def recent_tracks(self, + username: str = None, + limit: int = None, + from_time: datetime = 
None, + to_time: datetime = None, + page_limit: int = 50) -> Optional[List[Scrobble]]: if limit is not None: logger.info(f'pulling {limit} tracks') else: @@ -139,35 +139,35 @@ class Network: return [self.parse_scrobble(i) for i in items[:limit] if i.get('date')] - def get_scrobbles_from_date(self, - input_date: date, - username: str = None, - limit: int = None) -> Optional[List[Scrobble]]: + def scrobbles_from_date(self, + input_date: date, + username: str = None, + limit: int = None) -> Optional[List[Scrobble]]: logger.info(f'getting {input_date} scrobbles for {username or self.username}') midnight = time(hour=0, minute=0, second=0) from_date = datetime.combine(date=input_date, time=midnight) to_date = datetime.combine(date=input_date + timedelta(days=1), time=midnight) - return self.get_recent_tracks(username=username, from_time=from_date, to_time=to_date, limit=limit) + return self.recent_tracks(username=username, from_time=from_date, to_time=to_date, limit=limit) - def get_scrobble_count_from_date(self, - input_date: date, - username: str = None, - limit: int = None) -> int: + def count_scrobbles_from_date(self, + input_date: date, + username: str = None, + limit: int = None) -> int: logger.info(f'getting {input_date} scrobble count for {username or self.username}') - scrobbles = self.get_scrobbles_from_date(input_date=input_date, username=username, limit=limit) + scrobbles = self.scrobbles_from_date(input_date=input_date, username=username, limit=limit) if scrobbles: return len(scrobbles) else: return 0 - def get_track(self, - name: str, - artist: str, - username: str = None) -> Optional[Track]: + def track(self, + name: str, + artist: str, + username: str = None) -> Optional[Track]: logger.info(f'getting {name} / {artist} for {username or self.username}') resp = self.get_request('track.getInfo', @@ -180,10 +180,10 @@ class Network: else: logging.error(f'abnormal response - {resp}') - def get_album(self, - name: str, - artist: str, - username: str = None) -> Optional[Album]: + def album(self, + name: str, + artist: str, + username: str = None) -> Optional[Album]: logger.info(f'getting {name} / {artist} for {username or self.username}') resp = self.get_request('album.getInfo', @@ -196,9 +196,9 @@ class Network: else: logging.error(f'abnormal response - {resp}') - def get_artist(self, - name: str, - username: str = None) -> Optional[Artist]: + def artist(self, + name: str, + username: str = None) -> Optional[Artist]: logger.info(f'getting {name} for {username or self.username}') resp = self.get_request('artist.getInfo', @@ -210,10 +210,10 @@ class Network: else: logging.error(f'abnormal response - {resp}') - def get_top_tracks(self, - period: Range, - username: str = None, - limit: int = None): + def top_tracks(self, + period: Range, + username: str = None, + limit: int = None): if limit is not None: logger.info(f'pulling top {limit} tracks from {period.value} for {username or self.username}') else: @@ -229,10 +229,10 @@ class Network: return [self.parse_track(i) for i in iterator.items] - def get_top_albums(self, - period: Range, - username: str = None, - limit: int = None): + def top_albums(self, + period: Range, + username: str = None, + limit: int = None): if limit is not None: logger.info(f'pulling top {limit} albums from {period.value} for {username or self.username}') else: @@ -248,10 +248,10 @@ class Network: return [self.parse_chart_album(i) for i in iterator.items] - def get_top_artists(self, - period: Range, - username: str = None, - limit: int = None): + def 
top_artists(self, + period: Range, + username: str = None, + limit: int = None): if limit is not None: logger.info(f'pulling top {limit} artists from {period.value} for {username or self.username}') else: @@ -267,7 +267,7 @@ class Network: return [self.parse_artist(i) for i in iterator.items] - def get_weekly_charts(self, username: str = None): + def weekly_charts(self, username: str = None): logger.info('getting weekly chart list') resp = self.get_request('user.getweeklychartlist', user=username or self.username) @@ -277,13 +277,13 @@ class Network: else: logger.error('no response') - def get_weekly_chart(self, - object_type, - chart: WeeklyChart = None, - from_time: int = None, - to_time: int = None, - username: str = None, - limit: int = None): + def weekly_chart(self, + object_type, + chart: WeeklyChart = None, + from_time: int = None, + to_time: int = None, + username: str = None, + limit: int = None): if object_type not in ['album', 'artist', 'track']: raise ValueError('invalid object type') diff --git a/fmframework/net/scrape.py b/fmframework/net/scrape.py index 71a6740..047ca9d 100644 --- a/fmframework/net/scrape.py +++ b/fmframework/net/scrape.py @@ -34,14 +34,14 @@ class LibraryScraper: raise TypeError(f'invalid period provided, {period} / {type(period)}') @staticmethod - def get_scrobbled_tracks(username: str, artist: str, net: Network = None, whole_track=True, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def artist_tracks(username: str, artist: str, net: Network = None, whole_track=True, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f"loading {artist}'s tracks for {username}") - tracks = LibraryScraper.get_scraped_scrobbled_tracks(username=username, artist=artist, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + tracks = LibraryScraper.scraped_artist_tracks(username=username, artist=artist, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if whole_track and net is None: raise NameError('Network required for populating tracks') @@ -50,7 +50,7 @@ class LibraryScraper: if tracks is not None: if whole_track: for track in tracks: - populated_tracks.append(net.get_track(name=track.name, artist=track.artist.name, username=username)) + populated_tracks.append(net.track(name=track.name, artist=track.artist.name, username=username)) return populated_tracks else: @@ -59,25 +59,25 @@ class LibraryScraper: logger.error(f'no scraped tracks returned for {artist} / {username}') @staticmethod - def get_scraped_scrobbled_tracks(username: str, artist: str, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def scraped_artist_tracks(username: str, artist: str, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f'loading page scraped {artist} tracks for {username}') - page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1, - url_key='tracks', include_pages=True, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1, + url_key='tracks', include_pages=True, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page1 is not None: tracks = page1[0] for page_number in range(page1[1] - 1): - page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, - url_key='tracks', - page=page_number + 2, - from_date=from_date, 
to_date=to_date, - date_preset=date_preset) + page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, + url_key='tracks', + page=page_number + 2, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page is not None: tracks += page @@ -99,14 +99,14 @@ class LibraryScraper: logger.error(f'no tracks returned for page 1 of {artist} / {username}') @staticmethod - def get_scrobbled_albums(username: str, artist: str, net: Network = None, whole_album=True, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def artists_albums(username: str, artist: str, net: Network = None, whole_album=True, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f"loading {artist}'s albums for {username}") - albums = LibraryScraper.get_scraped_scrobbled_albums(username=username, artist=artist, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + albums = LibraryScraper.scraped_artists_albums(username=username, artist=artist, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if whole_album and net is None: raise NameError('Network required for populating albums') @@ -115,7 +115,7 @@ class LibraryScraper: if albums is not None: if whole_album: for album in albums: - populated_albums.append(net.get_album(name=album.name, artist=album.artist.name, username=username)) + populated_albums.append(net.album(name=album.name, artist=album.artist.name, username=username)) return populated_albums else: @@ -124,26 +124,26 @@ class LibraryScraper: logger.error(f'no scraped albums returned for {artist} / {username}') @staticmethod - def get_scraped_scrobbled_albums(username: str, artist: str, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def scraped_artists_albums(username: str, artist: str, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f'loading page scraped {artist} albums for {username}') - page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1, - url_key='albums', - include_pages=True, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1, + url_key='albums', + include_pages=True, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page1 is not None: albums = page1[0] for page_number in range(page1[1] - 1): - page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, - url_key='albums', - page=page_number + 2, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, + url_key='albums', + page=page_number + 2, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page is not None: albums += page @@ -165,14 +165,14 @@ class LibraryScraper: logger.error(f'no albums returned for page 1 of {artist} / {username}') @staticmethod - def get_albums_tracks(username: str, artist: str, album: str, net: Network = None, whole_track=True, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def album_tracks(username: str, artist: str, album: str, net: Network = None, whole_track=True, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f"loading {artist}'s {album} tracks for {username}") - tracks = 
LibraryScraper.get_scraped_albums_tracks(username=username, artist=artist, album=album, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + tracks = LibraryScraper.scraped_album_tracks(username=username, artist=artist, album=album, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if whole_track and net is None: raise NameError('Network required for populating tracks') @@ -181,7 +181,7 @@ class LibraryScraper: if tracks is not None: if whole_track: for track in tracks: - populated_tracks.append(net.get_track(name=track.name, artist=track.artist.name, username=username)) + populated_tracks.append(net.track(name=track.name, artist=track.artist.name, username=username)) return populated_tracks else: @@ -190,26 +190,26 @@ class LibraryScraper: logger.error(f'no scraped tracks returned for {album} / {artist} / {username}') @staticmethod - def get_scraped_albums_tracks(username: str, artist: str, album: str, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def scraped_album_tracks(username: str, artist: str, album: str, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f'loading page scraped {artist} albums for {username}') - page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1, - album=album, - include_pages=True, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1, + album=album, + include_pages=True, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page1 is not None: albums = page1[0] for page_number in range(page1[1] - 1): - page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, - album=album, - page=page_number + 2, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, + album=album, + page=page_number + 2, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page is not None: albums += page @@ -233,14 +233,14 @@ class LibraryScraper: logger.error(f'no tracks returned for page 1 of {album} / {artist} / {username}') @staticmethod - def get_track_scrobbles(username: str, artist: str, track: str, net: Network = None, whole_track=True, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def track_scrobbles(username: str, artist: str, track: str, net: Network = None, whole_track=True, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f"loading {track} / {artist} for {username}") - tracks = LibraryScraper.get_scraped_track_scrobbles(username=username, artist=artist, track=track, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + tracks = LibraryScraper.scraped_track_scrobbles(username=username, artist=artist, track=track, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if whole_track and net is None: raise NameError('Network required for populating tracks') @@ -249,12 +249,12 @@ class LibraryScraper: if tracks is not None: if whole_track: for track in tracks: - pulled_track = net.get_track(name=track.track.name, - artist=track.track.artist.name, - username=username) - pulled_track.album = net.get_album(name=track.track.album.name, - artist=track.track.album.name, - username=username) + pulled_track = net.track(name=track.track.name, + artist=track.track.artist.name, + 
username=username) + pulled_track.album = net.album(name=track.track.album.name, + artist=track.track.album.name, + username=username) populated_tracks.append(pulled_track) return populated_tracks @@ -264,26 +264,26 @@ class LibraryScraper: logger.error(f'no scraped tracks returned for {track} / {artist} / {username}') @staticmethod - def get_scraped_track_scrobbles(username: str, artist: str, track: str, - from_date: datetime = None, to_date: datetime = None, - date_preset: str = None): + def scraped_track_scrobbles(username: str, artist: str, track: str, + from_date: datetime = None, to_date: datetime = None, + date_preset: str = None): logger.info(f'loading page scraped {track} / {artist} for {username}') - page1 = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, page=1, - track=track, - include_pages=True, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page1 = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, page=1, + track=track, + include_pages=True, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page1 is not None: albums = page1[0] for page_number in range(page1[1] - 1): - page = LibraryScraper.get_scraped_artist_subpage(username=username, artist=artist, - track=track, - page=page_number + 2, - from_date=from_date, to_date=to_date, - date_preset=date_preset) + page = LibraryScraper.scraped_artist_subpage(username=username, artist=artist, + track=track, + page=page_number + 2, + from_date=from_date, to_date=to_date, + date_preset=date_preset) if page is not None: albums += page @@ -327,15 +327,15 @@ class LibraryScraper: logger.error(f'no scrobbles returned for page 1 of {track} / {artist} / {username}') @staticmethod - def get_scraped_artist_subpage(username: str, artist: str, page: int, + def scraped_artist_subpage(username: str, artist: str, page: int, - url_key: str = None, - album: str = None, - track: str = None, + url_key: str = None, + album: str = None, + track: str = None, - include_pages=False, - from_date: datetime = None, to_date: datetime = None, - date_preset: Union[str, Network.Range] = None): + include_pages=False, + from_date: datetime = None, to_date: datetime = None, + date_preset: Union[str, Network.Range] = None): logger.debug(f'loading page {page} of {artist} for {username}') url = f'https://www.last.fm/user/{username}/library/music/{parse.quote_plus(artist)}' @@ -391,23 +391,23 @@ class UserScraper: rsession = Session() @staticmethod - def get_album_chart(net: Network, username: str, from_date: date, to_date: date, limit: int): + def album_chart(net: Network, username: str, from_date: date, to_date: date, limit: int): """Scrape chart from last.fm frontend before pulling each from the backend for a complete object""" - chart = UserScraper.get_scraped_album_chart(username or net.username, from_date, to_date, limit) + chart = UserScraper.scraped_album_chart(username or net.username, from_date, to_date, limit) logger.info('populating scraped albums') albums = [] for counter, scraped in enumerate(chart): logger.debug(f'populating {counter+1} of {len(chart)}') try: - albums.append(net.get_album(name=scraped.name, artist=scraped.artist.name)) + albums.append(net.album(name=scraped.name, artist=scraped.artist.name)) except LastFMNetworkException: logger.exception(f'error occured during album retrieval') return albums @staticmethod - def get_scraped_album_chart(username: str, from_date: date, to_date: date, limit: int): + def scraped_album_chart(username: str, from_date: 
date, to_date: date, limit: int): """Scrape 'light' objects from last.fm frontend based on date range and limit""" logger.info(f'scraping album chart from {from_date} to {to_date} for {username}') @@ -418,14 +418,14 @@ class UserScraper: albums = [] for i in range(pages): - scraped_albums = UserScraper.get_scraped_album_chart_page(username, from_date, to_date, i + 1) + scraped_albums = UserScraper.scraped_album_chart_page(username, from_date, to_date, i + 1) if scraped_albums is not None: albums += scraped_albums return albums[:limit] @staticmethod - def get_scraped_album_chart_page(username: str, from_date: date, to_date: date, page: int): + def scraped_album_chart_page(username: str, from_date: date, to_date: date, page: int): """Scrape 'light' objects single page of last.fm frontend based on date range""" logger.debug(f'loading page {page} from {from_date} to {to_date} for {username}')
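
Usage sketch (illustrative, appended after the diff; placeholder track/artist names, and it assumes a valid last.fm API key in the FMKEY environment variable, as backup.py already uses): the rename simply drops the get_/download_ prefixes from the public retrieval methods, so call sites shorten as below.

    import os
    from fmframework.net.network import Network

    net = Network(username='sarsoo', api_key=os.environ['FMKEY'])

    count = net.user_scrobble_count()          # was net.get_user_scrobble_count()
    scrobbles = net.recent_tracks(limit=50)    # was net.get_recent_tracks(limit=50)
    track = net.track(name='Example Track',    # was net.get_track(...); name/artist
                      artist='Example Artist') # here are placeholders

    # Downloader follows the same pattern: download_image_by_size -> image_by_size,
    # download_best_image -> best_image, download_image -> download.

The duplicate.py hunks also swap the hand-rolled .replace(" ", "+") URL munging for urllib.parse.quote_plus, which escapes reserved characters such as & and / in artist and track names as well as spaces, so the generated last.fm library links stay valid for names that contain more than plain words.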