fixed cron schedule again, error handling for scrobble request and mapping
parent 349d62b5cc
commit 12842c240b
@@ -124,7 +124,7 @@ namespace Selector.CLI
         public const string Key = "Scrobble";
 
         public bool Enabled { get; set; } = true;
-        public string FullScrobbleCron { get; set; } = "0 0 2 * * *";
+        public string FullScrobbleCron { get; set; } = "0 0 2 * * ?";
         public TimeSpan InterJobDelay { get; set; } = TimeSpan.FromMinutes(5);
         public TimeSpan InterRequestDelay { get; set; } = TimeSpan.FromMilliseconds(100);
         public DateTime? From { get; set; } = DateTime.UtcNow.AddDays(-14);
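The cron fix above swaps the final (day-of-week) field from "*" to "?". Quartz-style cron expressions use six fields (second, minute, hour, day-of-month, month, day-of-week) and require that one of the two day fields be "?", so "0 0 2 * * *" is rejected while "0 0 2 * * ?" fires daily at 02:00. A minimal check of the two values, assuming the schedule string is consumed by Quartz.NET's CronExpression (the scheduler itself is outside this diff):

using System;
using Quartz;

class CronCheck
{
    static void Main()
    {
        // Quartz rejects '*' in both day fields; exactly one of
        // day-of-month / day-of-week must be '?'.
        Console.WriteLine(CronExpression.IsValidExpression("0 0 2 * * *")); // False
        Console.WriteLine(CronExpression.IsValidExpression("0 0 2 * * ?")); // True: every day at 02:00
    }
}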
@@ -45,29 +45,37 @@ namespace Selector
             currentTask = searchClient.Item(new (QueryType, Query));
             currentTask.ContinueWith(async t =>
             {
+                try
+                {
                     netTime.Stop();
                     logger.LogTrace("Network request took {:n} ms", netTime.ElapsedMilliseconds);
 
                     if (t.IsCompletedSuccessfully)
                     {
                         HandleResponse(t);
                         OnSuccess();
                         AggregateTaskSource.SetResult();
                     }
                     else
                     {
                         if (t.Exception.InnerException is APITooManyRequestsException ex)
                         {
                             logger.LogError("Spotify search request too many requests, waiting for {}", ex.RetryAfter);
                             await Task.Delay(ex.RetryAfter.Add(TimeSpan.FromSeconds(1)));
                             await Execute();
                         }
                         else
                         {
                             logger.LogError("Spotify search request task faulted, {}", t.Exception);
                             AggregateTaskSource.SetException(t.Exception);
                         }
                     }
+                }
+                catch (Exception e)
+                {
+                    logger.LogError(e, "Error while mapping Last.fm {} ({}) to Spotify on attempt {}", Query, QueryType, Attempts);
+                    Succeeded = false;
+                }
             });
 
             Attempts++;
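Note on the try/catch introduced above: the continuation passed to ContinueWith is an async lambda, so it produces a nested task that nothing awaits; an exception escaping the lambda would be lost instead of faulting AggregateTaskSource or marking the attempt as failed. A self-contained sketch of that behaviour (illustrative names, not types from this repo):

using System;
using System.Threading.Tasks;

class ContinuationExceptionDemo
{
    static async Task Main()
    {
        // A source task that has already faulted, like a failed API call.
        var source = Task.FromException<string>(new InvalidOperationException("boom"));

        // ContinueWith with an async lambda returns Task<Task>: awaiting the outer
        // continuation does NOT observe what the inner async body throws.
        Task outer = source.ContinueWith(async t =>
        {
            try
            {
                Console.WriteLine(t.Result); // rethrows the fault as an AggregateException
            }
            catch (Exception e)
            {
                // Without this catch the exception would sit on the unobserved inner task.
                Console.WriteLine($"caught inside continuation: {e.GetBaseException().Message}");
            }
            await Task.CompletedTask;
        });

        await outer; // completes normally even though the source task faulted
        Console.WriteLine("outer continuation completed");
    }
}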
@@ -56,39 +56,47 @@ namespace Selector
             currentTask = userClient.GetRecentScrobbles(username, pagenumber: pageNumber, count: pageSize, from: from, to: to);
             currentTask.ContinueWith(async t =>
             {
+                try
+                {
                     netTime.Stop();
                     logger.LogTrace("Network request took {:n} ms", netTime.ElapsedMilliseconds);
 
                     if (t.IsCompletedSuccessfully)
                     {
                         var result = t.Result;
                         Succeeded = result.Success;
 
                         if (Succeeded)
                         {
                             Scrobbles = result.Content.ToArray();
                             TotalPages = result.TotalPages;
                             OnSuccess();
                             AggregateTaskSource.SetResult();
                         }
                         else
                         {
                             if (Attempts < MaxAttempts)
                             {
                                 logger.LogDebug("Request failed for {}, #{} by {}: {}, retrying ({} of {})", username, pageNumber, pageSize, result.Status, Attempts + 1, MaxAttempts);
                                 await Execute();
                             }
                             else
                             {
                                 logger.LogDebug("Request failed for {}, #{} by {}: {}, max retries exceeded {}, not retrying", username, pageNumber, pageSize, result.Status, MaxAttempts);
                                 AggregateTaskSource.SetCanceled();
                             }
                         }
                     }
                     else
                     {
                         logger.LogError("Scrobble request task faulted, {}", t.Exception);
                         AggregateTaskSource.SetException(t.Exception);
                     }
+                }
+                catch(Exception e)
+                {
+                    logger.LogError(e, "Error while making scrobble request #{} for {} by {} from {} to {} on attempt {}", pageNumber, username, pageSize, from, to, Attempts);
+                    Succeeded = false;
+                }
             });
 
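Both continuations follow the same shape: either signal the shared TaskCompletionSource (SetResult / SetCanceled / SetException) or call Execute() again for another attempt, so a caller awaits one task across all retries. A stripped-down sketch of that pattern, using placeholder names rather than the repo's types:

using System;
using System.Threading.Tasks;

class BoundedRetrySketch
{
    private static readonly TaskCompletionSource aggregate = new();
    private static int attempts;
    private const int MaxAttempts = 3;

    // Stand-in for the network call: fails on the first attempt, succeeds afterwards.
    private static Task<bool> FakeRequestAsync() => Task.FromResult(attempts >= 2);

    private static Task Execute()
    {
        attempts++;
        var request = FakeRequestAsync();

        // Fire-and-forget continuation: all outcomes are reported through `aggregate`.
        _ = request.ContinueWith(async t =>
        {
            try
            {
                if (t.IsCompletedSuccessfully && t.Result)
                {
                    aggregate.SetResult();     // success: release awaiting callers
                }
                else if (attempts < MaxAttempts)
                {
                    await Task.Delay(100);     // brief back-off, then retry
                    await Execute();
                }
                else
                {
                    aggregate.SetCanceled();   // retries exhausted
                }
            }
            catch (Exception e)
            {
                aggregate.SetException(e);     // surface unexpected failures
            }
        });

        return Task.CompletedTask;
    }

    static async Task Main()
    {
        await Execute();          // kicks off attempt #1
        await aggregate.Task;     // completes once an attempt succeeds or retries run out
        Console.WriteLine($"completed after {attempts} attempt(s)");
    }
}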