diff --git a/typescript/frontend-marios2/src/content/dashboards/Installations/fetchData.tsx b/typescript/frontend-marios2/src/content/dashboards/Installations/fetchData.tsx index 00db2c09e..6fb612b6e 100644 --- a/typescript/frontend-marios2/src/content/dashboards/Installations/fetchData.tsx +++ b/typescript/frontend-marios2/src/content/dashboards/Installations/fetchData.tsx @@ -5,6 +5,66 @@ import { S3Access } from 'src/dataCache/S3/S3Access'; import { JSONRecordData, parseChunkJson } from '../Log/graph.util'; import JSZip from 'jszip'; +// Find the latest chunk file in S3 using ListObjects (single HTTP request) +// Returns the parsed chunk data or FetchResult.notAvailable +export const fetchLatestDataJson = ( + s3Credentials?: I_S3Credentials, + maxAgeSeconds: number = 400 +): Promise<FetchResult | Record<string, JSONRecordData>> => { + if (!s3Credentials || !s3Credentials.s3Bucket) { + return Promise.resolve(FetchResult.notAvailable); + } + + const s3Access = new S3Access( + s3Credentials.s3Bucket, + s3Credentials.s3Region, + s3Credentials.s3Provider, + s3Credentials.s3Key, + s3Credentials.s3Secret + ); + + // Use marker to skip files older than maxAgeSeconds + const oldestTimestamp = Math.floor(Date.now() / 1000) - maxAgeSeconds; + const marker = `${oldestTimestamp}.json`; + + return s3Access + .list(marker, 50) + .then(async (r) => { + if (r.status !== 200) { + return Promise.resolve(FetchResult.notAvailable); + } + const xml = await r.text(); + const parser = new DOMParser(); + const doc = parser.parseFromString(xml, 'application/xml'); + const keys = Array.from(doc.getElementsByTagName('Key')) + .map((el) => el.textContent) + .filter((key) => key && /^\d+\.json$/.test(key)) + .sort((a, b) => Number(b.replace('.json', '')) - Number(a.replace('.json', ''))); + + if (keys.length === 0) { + return Promise.resolve(FetchResult.notAvailable); + } + + // Fetch the most recent chunk file + const latestKey = keys[0]; + const res = await s3Access.get(latestKey); + if (res.status !== 200) { + return 
Promise.resolve(FetchResult.notAvailable); + } + + const jsontext = await res.text(); + const byteArray = Uint8Array.from(atob(jsontext), (c) => + c.charCodeAt(0) + ); + const zip = await JSZip.loadAsync(byteArray); + const jsonContent = await zip.file('data.json').async('text'); + return parseChunkJson(jsonContent); + }) + .catch(() => { + return Promise.resolve(FetchResult.tryLater); + }); +}; + export const fetchDataJson = ( timestamp: UnixTime, s3Credentials?: I_S3Credentials, diff --git a/typescript/frontend-marios2/src/content/dashboards/SodiohomeInstallations/Installation.tsx b/typescript/frontend-marios2/src/content/dashboards/SodiohomeInstallations/Installation.tsx index 703098d50..da82a3c54 100644 --- a/typescript/frontend-marios2/src/content/dashboards/SodiohomeInstallations/Installation.tsx +++ b/typescript/frontend-marios2/src/content/dashboards/SodiohomeInstallations/Installation.tsx @@ -111,20 +111,52 @@ function SodioHomeInstallation(props: singleInstallationProps) { return btoa(String.fromCharCode(...combined)); } - const fetchDataPeriodically = async () => { - var timeperiodToSearch = 350; - let res; - let timestampToFetch; + // Probe multiple timestamps in parallel, return first successful result + const probeTimestampBatch = async ( + offsets: number[] + ): Promise<{ res: any; timestamp: UnixTime } | null> => { + const now = UnixTime.now(); + const promises = offsets.map(async (offset) => { + const ts = now.earlier(TimeSpan.fromSeconds(offset)); + const result = await fetchDataJson(ts, s3Credentials, false); + if (result !== FetchResult.notAvailable && result !== FetchResult.tryLater) { + return { res: result, timestamp: ts }; + } + return null; + }); - for (var i = 0; i < timeperiodToSearch; i += 30) { + const results = await Promise.all(promises); + // Return the most recent hit (smallest offset = first in array) + return results.find((r) => r !== null) || null; + }; + + const fetchDataPeriodically = async () => { + let res; + let 
timestampToFetch: UnixTime; + + // Search backward in parallel batches of 10 timestamps (2s apart) + // Each batch covers 20 seconds, so 20 batches cover 400 seconds + const batchSize = 10; + const step = 2; // 2-second steps to match even-rounding granularity + const maxAge = 400; + let found = false; + + for (let batchStart = 0; batchStart < maxAge; batchStart += batchSize * step) { if (!continueFetching.current) { return false; } - timestampToFetch = UnixTime.now().earlier(TimeSpan.fromSeconds(i)); + const offsets = []; + for (let j = 0; j < batchSize; j++) { + const offset = batchStart + j * step; + if (offset < maxAge) offsets.push(offset); + } try { - res = await fetchDataJson(timestampToFetch, s3Credentials, false); - if (res !== FetchResult.notAvailable && res !== FetchResult.tryLater) { + const hit = await probeTimestampBatch(offsets); + if (hit) { + res = hit.res; + timestampToFetch = hit.timestamp; + found = true; break; } } catch (err) { @@ -133,7 +165,7 @@ function SodioHomeInstallation(props: singleInstallationProps) { } } - if (i >= timeperiodToSearch) { + if (!found) { setConnected(false); setLoading(false); return false; @@ -154,10 +186,12 @@ function SodioHomeInstallation(props: singleInstallationProps) { await timeout(2000); } - timestampToFetch = timestampToFetch.later(TimeSpan.fromSeconds(60)); + // Advance by 150s to find the next chunk (15 records × 10s interval) + timestampToFetch = timestampToFetch.later(TimeSpan.fromSeconds(150)); console.log('NEW TIMESTAMP TO FETCH IS ' + timestampToFetch); - for (i = 0; i < 30; i++) { + let foundNext = false; + for (var i = 0; i < 60; i++) { if (!continueFetching.current) { return false; } @@ -169,6 +203,7 @@ function SodioHomeInstallation(props: singleInstallationProps) { res !== FetchResult.notAvailable && res !== FetchResult.tryLater ) { + foundNext = true; break; } } catch (err) { @@ -177,24 +212,30 @@ function SodioHomeInstallation(props: singleInstallationProps) { } timestampToFetch = 
timestampToFetch.later(TimeSpan.fromSeconds(1)); } - if (i == 30) { + if (!foundNext) { return false; } } }; const fetchDataForOneTime = async () => { - var timeperiodToSearch = 300; // 5 minutes to cover ~2 upload cycles (150s each) + // Search backward in parallel batches of 10 timestamps (2s apart) + const batchSize = 10; + const step = 2; + const maxAge = 400; let res; - let timestampToFetch; - // Search from NOW backward to find the most recent data - // Step by 50 seconds - data is uploaded every ~150s, so finer steps are wasteful - for (var i = 0; i < timeperiodToSearch; i += 50) { - timestampToFetch = UnixTime.now().earlier(TimeSpan.fromSeconds(i)); + for (let batchStart = 0; batchStart < maxAge; batchStart += batchSize * step) { + const offsets = []; + for (let j = 0; j < batchSize; j++) { + const offset = batchStart + j * step; + if (offset < maxAge) offsets.push(offset); + } + try { - res = await fetchDataJson(timestampToFetch, s3Credentials, false); - if (res !== FetchResult.notAvailable && res !== FetchResult.tryLater) { + const hit = await probeTimestampBatch(offsets); + if (hit) { + res = hit.res; break; } } catch (err) { @@ -203,11 +244,12 @@ function SodioHomeInstallation(props: singleInstallationProps) { } } - if (i >= timeperiodToSearch) { + if (!res) { setConnected(false); setLoading(false); return false; } + setConnected(true); setLoading(false); @@ -215,12 +257,6 @@ function SodioHomeInstallation(props: singleInstallationProps) { const timestamps = Object.keys(res).sort((a, b) => Number(b) - Number(a)); const latestTimestamp = timestamps[0]; setValues(res[latestTimestamp]); - // setValues( - // extractValues({ - // time: UnixTime.fromTicks(parseInt(timestamp, 10)), - // value: res[timestamp] - // }) - // ); return true; }; diff --git a/typescript/frontend-marios2/src/dataCache/S3/S3Access.ts b/typescript/frontend-marios2/src/dataCache/S3/S3Access.ts index 294247187..12a6171b6 100644 --- 
a/typescript/frontend-marios2/src/dataCache/S3/S3Access.ts +++ b/typescript/frontend-marios2/src/dataCache/S3/S3Access.ts @@ -32,6 +32,22 @@ export class S3Access { } } + public list(marker?: string, maxKeys: number = 50): Promise<Response> { + const method = "GET"; + const auth = this.createAuthorizationHeader(method, "", ""); + const params = new URLSearchParams(); + if (marker) params.set("marker", marker); + params.set("max-keys", maxKeys.toString()); + const url = this.url + "/" + this.bucket + "/?" + params.toString(); + const headers = { Host: this.host, Authorization: auth }; + + try { + return fetch(url, { method: method, mode: "cors", headers: headers }); + } catch { + return Promise.reject(); + } + } + private createAuthorizationHeader( method: string, s3Path: string,