Retry network operations on failure; filter the calendar to genuine events only by default

This commit is contained in:
Andy Pack 2024-06-10 22:06:16 +01:00
parent 9331519649
commit 1d36c8d165
Signed by: sarsoo
GPG Key ID: A55BA3536A5E0ED7
6 changed files with 129 additions and 70 deletions

View File

@ -7,7 +7,7 @@ using Overflow.SouthernWater;
var driver = new MongoClient("mongodb://localhost");
var api = new SouthernWaterApi(new HttpClient());
var api = new SouthernWaterApi(new HttpClient(), NullLogger<SouthernWaterApi>.Instance);
await api.LoadApiUrl();
var runner = new SouthernWaterApiJobRunnerPersisting(api, NullLogger<SouthernWaterApiJobRunner>.Instance, driver.GetDatabase(Static.DatabaseName));

View File

@ -1,3 +1,6 @@
using Microsoft.Extensions.Logging.Abstractions;
using Overflow.SouthernWater;
namespace Overflow.Test;
public class Tests
@ -10,7 +13,7 @@ public class Tests
[Test]
public async Task Test1()
{
var southern = new SouthernWater.SouthernWaterApi(new HttpClient());
var southern = new SouthernWater.SouthernWaterApi(new HttpClient(), NullLogger<SouthernWaterApi>.Instance);
await southern.LoadApiUrl();
var spills = await southern.GetSpills();
}

View File

@ -10,7 +10,7 @@
}
else
{
<RadzenScheduler @ref=@scheduler SlotRender=@OnSlotRender style="height: 768px;" TItem="Spill" Data=@Job.Spills StartProperty="eventStart" EndProperty="eventStop"
<RadzenScheduler @ref=@scheduler SlotRender=@OnSlotRender style="height: 768px;" TItem="Spill" Data=@Spills StartProperty="eventStart" EndProperty="eventStop"
TextProperty="bathingSite" SelectedIndex="2"
SlotSelect=@OnSlotSelect AppointmentSelect=@OnAppointmentSelect AppointmentRender=@OnAppointmentRender
AppointmentMove=@OnAppointmentMove >
@ -26,9 +26,21 @@ else
@code {
RadzenScheduler<Spill> scheduler;
[Parameter] public SouthernWaterApiJob? Job { get; set; }
[Parameter] public bool GenuineOnly { get; set; } = true;
Dictionary<DateTime, string> events = new Dictionary<DateTime, string>();
// Spills to display on the calendar: every spill on the job, or only the
// ones whose status is "Genuine" when GenuineOnly is set (the default).
private IEnumerable<Spill> Spills {
    get
    {
        // Job is a nullable [Parameter]; Blazor can render before it is
        // supplied, so guard instead of dereferencing Job.Spills blindly.
        if (Job?.Spills is null)
        {
            return Enumerable.Empty<Spill>();
        }
        return GenuineOnly
            ? Job.Spills.Where(s => s.status == "Genuine")
            : Job.Spills;
    }
}
void OnSlotRender(SchedulerSlotRenderEventArgs args)
{

View File

@ -15,15 +15,20 @@
{
<RadzenText TextStyle="TextStyle.Body1">Last updated at <b>@job.EndTime</b></RadzenText>
}
<RadzenStack Orientation="Orientation.Horizontal" AlignItems="AlignItems.Start" Wrap="FlexWrap.Wrap">
<RadzenCheckBox @bind-Value=@genuineOnly Name="genuineOnly" />
<RadzenLabel Text="Genuine Events Only" Component="genuineOnly" Style="margin-left: 8px; vertical-align: middle;" />
</RadzenStack>
</RadzenStack>
</RadzenCard>
<SpillsCalendar Job="@job" />
<SpillsCalendar Job="@job" GenuineOnly="@genuineOnly" />
@code {
private SouthernWaterApiJob? job;
[Inject] private IMongoDatabase database { get; set; }
// private bool showIds;
private bool genuineOnly = true;
protected override async Task OnInitializedAsync()
{

View File

@ -1,6 +1,7 @@
using System.Net.Http.Json;
using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using Quartz.Util;
namespace Overflow.SouthernWater;
@ -8,15 +9,17 @@ namespace Overflow.SouthernWater;
public partial class SouthernWaterApi
{
private readonly HttpClient _client;
private readonly ILogger<SouthernWaterApi> _logger;
private static readonly string spillsEndpoint = "Spills/GetHistoricSpills";
private string baseUrl;
private string apiKey;
public SouthernWaterApi(HttpClient client)
public SouthernWaterApi(HttpClient client, ILogger<SouthernWaterApi> logger)
{
_client = client;
_logger = logger;
}
[GeneratedRegex(@".*const APIURL = '(?<APIURL>.*)'.*\n.*const APIGWKEY = '(?<APIKEY>.*)'.*", RegexOptions.IgnoreCase)]
@ -24,42 +27,58 @@ public partial class SouthernWaterApi
public async Task LoadApiUrl()
{
var request = new HttpRequestMessage
var success = false;
while (!success)
{
RequestUri = new Uri("https://www.southernwater.co.uk/scripts/beachbuoyhistoricspillstable.js"),
Method = HttpMethod.Get,
Headers =
try
{
{"Accept", "*/*"},
{"Accept-Language", "en-GB,en;q=0.5"},
// {"Accept-Encoding", "gzip, deflate, br, zstd"},
{"Cache-Control", "no-cache"},
{"Connection", "keep-alive"},
{"DNT", "1"},
{"User-Agent", "Mozilla/5.0 (Windows NT 10.0; rv:126.0) Gecko/20100101 Firefox/126.0"},
{"Upgrade-Insecure-Requests", "1"},
{"Referer", "https://www.southernwater.co.uk/our-region/clean-rivers-and-seas-task-force/beachbuoy-historic-release-table/"}
var request = new HttpRequestMessage
{
RequestUri = new Uri("https://www.southernwater.co.uk/scripts/beachbuoyhistoricspillstable.js"),
Method = HttpMethod.Get,
Headers =
{
{"Accept", "*/*"},
{"Accept-Language", "en-GB,en;q=0.5"},
// {"Accept-Encoding", "gzip, deflate, br, zstd"},
{"Cache-Control", "no-cache"},
{"Connection", "keep-alive"},
{"DNT", "1"},
{"User-Agent", "Mozilla/5.0 (Windows NT 10.0; rv:126.0) Gecko/20100101 Firefox/126.0"},
{"Upgrade-Insecure-Requests", "1"},
{"Referer", "https://www.southernwater.co.uk/our-region/clean-rivers-and-seas-task-force/beachbuoy-historic-release-table/"}
}
};
var content = await _client.SendAsync(request);
content.EnsureSuccessStatusCode();
success = true;
var contentString = await content.Content.ReadAsStringAsync();
Match m = ApiUrlAndKey().Match(contentString);
var apiUrlFound = m.Groups.TryGetValue("APIURL", out var apiUrl);
var apiKeyFound = m.Groups.TryGetValue("APIKEY", out var apiKey);
if (apiUrlFound)
{
baseUrl = apiUrl.Value;
}
if (apiKeyFound)
{
this.apiKey = apiKey.Value;
}
}
catch (HttpRequestException e)
{
_logger.LogError(e, "HTTP Exception while API details, waiting {} before retrying", Static.Interval);
await Task.Delay(Static.Interval);
}
};
var content = await _client.SendAsync(request);
content.EnsureSuccessStatusCode();
var contentString = await content.Content.ReadAsStringAsync();
Match m = ApiUrlAndKey().Match(contentString);
var apiUrlFound = m.Groups.TryGetValue("APIURL", out var apiUrl);
var apiKeyFound = m.Groups.TryGetValue("APIKEY", out var apiKey);
if (apiUrlFound)
{
baseUrl = apiUrl.Value;
}
if (apiKeyFound)
{
this.apiKey = apiKey.Value;
}
}
@ -67,39 +86,59 @@ public partial class SouthernWaterApi
{
if (baseUrl.IsNullOrWhiteSpace()) await LoadApiUrl();
var request = new HttpRequestMessage()
PagedItems<Spill>? parsedPage = null;
var success = false;
while (!success)
{
RequestUri = new Uri(baseUrl + spillsEndpoint + "?page=" + page),
Method = HttpMethod.Get,
Headers =
try
{
{"Accept", "*/*"},
{"Accept-Language", "en-GB,en;q=0.5"},
// {"Accept-Encoding", "gzip, deflate, br, zstd"},
{"Cache-Control", "no-cache"},
{"Connection", "keep-alive"},
{"DNT", "1"},
{"User-Agent", "Mozilla/5.0 (Windows NT 10.0; rv:126.0) Gecko/20100101 Firefox/126.0"},
{"Upgrade-Insecure-Requests", "1"},
{"Referer", "https://www.southernwater.co.uk/our-region/clean-rivers-and-seas-task-force/beachbuoy-historic-release-table/"},
{"x-Gateway-APIKey", apiKey},
{"X-Requested-With", "XMLHttpRequest"},
var request = new HttpRequestMessage()
{
RequestUri = new Uri(baseUrl + spillsEndpoint + "?page=" + page),
Method = HttpMethod.Get,
Headers =
{
{ "Accept", "*/*" },
{ "Accept-Language", "en-GB,en;q=0.5" },
// {"Accept-Encoding", "gzip, deflate, br, zstd"},
{ "Cache-Control", "no-cache" },
{ "Connection", "keep-alive" },
{ "DNT", "1" },
{ "User-Agent", "Mozilla/5.0 (Windows NT 10.0; rv:126.0) Gecko/20100101 Firefox/126.0" },
{ "Upgrade-Insecure-Requests", "1" },
{
"Referer",
"https://www.southernwater.co.uk/our-region/clean-rivers-and-seas-task-force/beachbuoy-historic-release-table/"
},
{ "x-Gateway-APIKey", apiKey },
{ "X-Requested-With", "XMLHttpRequest" },
}
};
var content = await _client.SendAsync(request);
content.EnsureSuccessStatusCode();
success = true;
parsedPage = (PagedItems<Spill>?)await content.Content.ReadFromJsonAsync(typeof(PagedItems<Spill>),
jsonSerialiser ?? new JsonSerialiser());
if (parsedPage is not null)
{
parsedPage.items.ForEach(x =>
{
x.eventStart = x.eventStart.ToUniversalTime();
x.eventStop = x.eventStop.ToUniversalTime();
});
}
}
};
var content = await _client.SendAsync(request);
content.EnsureSuccessStatusCode();
var parsedPage = (PagedItems<Spill>?) await content.Content.ReadFromJsonAsync(typeof(PagedItems<Spill>), jsonSerialiser ?? new JsonSerialiser());
if (parsedPage is not null)
{
parsedPage.items.ForEach(x =>
catch (HttpRequestException e)
{
x.eventStart = x.eventStart.ToUniversalTime();
x.eventStop = x.eventStop.ToUniversalTime();
});
_logger.LogError(e, "HTTP Exception while loading page [{}], waiting {} before retrying", page, Static.Interval);
await Task.Delay(Static.Interval);
}
}
return parsedPage;