added handling for the now playing scrobble, made page parsing more resilient
added get_top_tracks/albums/artists methods
commit 028bce6ee8
parent 13120b0a47
@@ -27,7 +27,8 @@ class Album(LastFM):
         return f'{self.name} / {self.artist}'

     def __repr__(self):
-        return Color.DARKCYAN + Color.BOLD + 'Album' + Color.END + f': {self.name} {self.artist} ' + super().__repr__()
+        return Color.DARKCYAN + Color.BOLD + 'Album' + Color.END + f': {self.name} artist({repr(self.artist)}) ' \
+               + super().__repr__()

     @staticmethod
     def wrap(name: str = None,
@@ -23,4 +23,4 @@ class Artist(LastFM):
         return f'{self.name}'

     def __repr__(self):
-        return Color.PURPLE + Color.BOLD + 'Artist' + Color.END + f': {self.name}' + super().__repr__()
+        return Color.PURPLE + Color.BOLD + 'Artist' + Color.END + f': {self.name} ' + super().__repr__()
@@ -2,6 +2,7 @@ import requests
 from typing import Optional, List
 from copy import deepcopy
 import logging
+from enum import Enum
 from datetime import datetime, date, time, timedelta

 from fmframework.model.fm import Scrobble, Wiki
@@ -14,6 +15,14 @@ logger = logging.getLogger(__name__)

 class Network:

+    class Range(Enum):
+        OVERALL = 'overall'
+        WEEK = '7day'
+        MONTH = '1month'
+        QUARTER = '3month'
+        HALFYEAR = '6month'
+        YEAR = '12month'
+
     def __init__(self, username, api_key):
         self.api_key = api_key

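Reviewer note, not part of the diff: the nested Range enum wraps the period strings that Last.fm's user.gettop* endpoints accept, so callers pass an enum member and the request sends its .value. A minimal sketch (the module path fmframework.net.network is an assumption):

    from fmframework.net.network import Network

    assert Network.Range.QUARTER.value == '3month'
    for period in Network.Range:
        print(period.name, '->', period.value)  # OVERALL -> overall, WEEK -> 7day, ...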
@@ -85,8 +94,9 @@ class Network:

         items = iterator.items

-        if items[0].get('@attr', {}).get('nowplaying', None):
-            items.pop(0)
+        if len(items) >= 1:
+            if items[0].get('@attr', {}).get('nowplaying', None):
+                items.pop(0)

         return [self.parse_scrobble(i) for i in items[:limit]]

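Reviewer note, not from the commit: user.getrecenttracks reports a currently playing track as the first item flagged with a nowplaying attribute, and an empty listening history previously made items[0] raise IndexError; the new length guard covers that. A sketch with made-up values:

    items = [
        {'name': 'song a', '@attr': {'nowplaying': 'true'}},    # in-progress track, no timestamp
        {'name': 'song b', 'date': {'uts': '1556751600'}},
    ]

    if len(items) >= 1:                                         # empty history no longer raises IndexError
        if items[0].get('@attr', {}).get('nowplaying', None):   # drop the now playing entry
            items.pop(0)

    assert [i['name'] for i in items] == ['song b']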
@@ -172,6 +182,67 @@ class Network:
         else:
             logger.error('no response')

+    def get_top_tracks(self,
+                       period: Range,
+                       username: str = None,
+                       limit: int = None):
+        if limit is not None:
+            logger.info(f'pulling top {limit} tracks from {period.value} '
+                        f'for {self.username if username is None else username}')
+        else:
+            logger.info(f'pulling top tracks from {period.value} for {self.username if username is None else username}')
+
+        params = {
+            'user': self.username if username is None else username,
+            'period': period.value
+        }
+
+        iterator = PageCollection(net=self, method='user.gettoptracks', params=params, response_limit=limit)
+        iterator.load()
+
+        return [self.parse_track(i) for i in iterator.items]
+
+    def get_top_albums(self,
+                       period: Range,
+                       username: str = None,
+                       limit: int = None):
+        if limit is not None:
+            logger.info(f'pulling top {limit} albums from {period.value} '
+                        f'for {self.username if username is None else username}')
+        else:
+            logger.info(f'pulling top albums from {period.value} for {self.username if username is None else username}')
+
+        params = {
+            'user': self.username if username is None else username,
+            'period': period.value
+        }
+
+        iterator = PageCollection(net=self, method='user.gettopalbums', params=params, response_limit=limit)
+        iterator.load()
+
+        return [self.parse_album(i) for i in iterator.items]
+
+    def get_top_artists(self,
+                        period: Range,
+                        username: str = None,
+                        limit: int = None):
+        if limit is not None:
+            logger.info(f'pulling top {limit} artists from {period.value} '
+                        f'for {self.username if username is None else username}')
+        else:
+            logger.info(f'pulling top artists from {period.value} '
+                        f'for {self.username if username is None else username}')
+
+        params = {
+            'user': self.username if username is None else username,
+            'period': period.value
+        }
+
+        iterator = PageCollection(net=self, method='user.gettopartists', params=params, response_limit=limit)
+        iterator.load()
+
+        return [self.parse_artist(i) for i in iterator.items]
+
     @staticmethod
     def parse_wiki(wiki_dict) -> Optional[Wiki]:
         if wiki_dict:
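A hedged usage sketch of the three new methods; the username and API key are placeholders, and the Network constructor is assumed to take only the two arguments shown in the hunk above:

    net = Network(username='example_user', api_key='xxxxxxxxxxxxxxxx')

    tracks = net.get_top_tracks(period=Network.Range.MONTH, limit=10)        # top 10 tracks, last month
    albums = net.get_top_albums(period=Network.Range.OVERALL)                # all-time albums, no limit
    artists = net.get_top_artists(period=Network.Range.YEAR, username='some_friend')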
@@ -185,13 +256,14 @@ class Network:
         return Artist(name=artist_dict.get('name', 'n/a'),
                       url=artist_dict.get('url', None),
                       mbid=artist_dict.get('mbid', None),
-                      listeners=int(artist_dict["stats"].get('listeners', 0)),
-                      play_count=int(artist_dict["stats"].get('playcount', 0)),
-                      user_scrobbles=int(artist_dict["stats"].get('userplaycount', 0)),
+                      listeners=int(artist_dict.get('stats', {}).get('listeners', 0)),
+                      play_count=int(artist_dict.get('stats', {}).get('playcount', 0)),
+                      user_scrobbles=int(artist_dict.get('stats', {}).get('userplaycount',
+                                                                          artist_dict.get('playcount', 0))),
                       wiki=self.parse_wiki(artist_dict['wiki']) if artist_dict.get('wiki', None) else None)

     def parse_album(self, album_dict) -> Album:
-        return Album(name=album_dict.get('name', 'n/a'),
+        return Album(name=album_dict.get('name', album_dict.get('title', 'n/a')),
                      url=album_dict.get('url', 'n/a'),
                      mbid=album_dict.get('mbid', 'n/a'),
                      listeners=int(album_dict.get('listeners', 0)),
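Reviewer note on the userplaycount fallback: artist.getInfo nests counts under a 'stats' block, while chart items (e.g. from user.gettopartists) carry a bare top-level 'playcount', which is presumably why both shapes are now accepted. Invented minimal payloads:

    from_info = {'name': 'artist a', 'stats': {'listeners': '10', 'playcount': '99', 'userplaycount': '7'}}
    from_chart = {'name': 'artist a', 'playcount': '7'}   # no 'stats' block at all

    for artist_dict in (from_info, from_chart):
        scrobbles = int(artist_dict.get('stats', {}).get('userplaycount', artist_dict.get('playcount', 0)))
        assert scrobbles == 7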
@@ -213,7 +285,7 @@ class Network:
             track.album = self.parse_album(track_dict['album'])

         if track_dict.get('artist', None):
-            track.album = self.parse_album(track_dict['artist'])
+            track.artist = self.parse_artist(track_dict['artist'])

         return track

@@ -281,7 +353,7 @@ class PageCollection:
         tracker = True
         while len(self) < self.response_limit and tracker:
             page = self.iterate()
-            if len(page) == 0:
+            if len(page) == 0 or self.counter > page.total_pages:
                 tracker = False
             else:
                 self.pages.append(page)
@@ -289,7 +361,7 @@ class PageCollection:
         tracker = True
         while tracker:
             page = self.iterate()
-            if len(page) == 0:
+            if len(page) == 0 or self.counter > page.total_pages:
                 tracker = False
             else:
                 self.pages.append(page)
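Reviewer note: the extra check presumably stops paging once the requested page number passes the reported totalPages, assuming PageCollection.counter tracks the page just fetched; a toy sketch of the condition:

    tracker = True
    counter, total_pages = 4, 3          # hypothetical: page 4 was just requested but only 3 exist
    page_items = ['something']           # the API can still echo items for out-of-range pages
    if len(page_items) == 0 or counter > total_pages:
        tracker = False                  # stop paging instead of looping past the end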
@@ -325,12 +397,16 @@ class PageCollection:
     @staticmethod
     def parse_page(page_dict):
         first_value = list(page_dict.values())[0]
-        items = list(first_value.values())[1]
+        attr = first_value['@attr']
+
+        del first_value['@attr']
+        items = list(first_value.values())[0]
+
         return Page(
-            number=first_value['@attr'].get('page', None),
-            size=first_value['@attr'].get('perPage', None),
-            total=first_value['@attr'].get('total', None),
-            total_pages=first_value['@attr'].get('totalPages', None),
+            number=int(attr.get('page', None)),
+            size=int(attr.get('perPage', None)),
+            total=int(attr.get('total', None)),
+            total_pages=int(attr.get('totalPages', None)),
             items=items)


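Reviewer note: deleting '@attr' before taking the first remaining value avoids the old positional index, which broke when the metadata and item-list keys came back in a different order. A sketch with an invented user.gettopartists-shaped payload:

    page_dict = {
        'topartists': {
            '@attr': {'page': '1', 'perPage': '50', 'total': '120', 'totalPages': '3'},
            'artist': [{'name': 'artist a'}, {'name': 'artist b'}],
        }
    }

    first_value = list(page_dict.values())[0]
    attr = first_value['@attr']
    del first_value['@attr']                   # only the item list is left behind
    items = list(first_value.values())[0]

    assert int(attr['totalPages']) == 3 and len(items) == 2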