fix hazard history duplication for long multi-day hazard events

This commit is contained in:
mrkmntal 2026-04-17 09:40:40 -04:00
commit 9aae190f74

View file

@ -50,13 +50,77 @@ const saveHistory = async (history) => {
}
};
/**
 * Whether a location key is a raw "lat,lon" coordinate pair
 * (optionally signed decimals separated by a comma).
 * Nullish input is treated as an empty string and returns false.
 * @param {string} value - Candidate location key
 * @returns {boolean} True when the key is coordinate-shaped
 */
const isCoordinateLocationKey = (value) => {
  const candidate = value ?? '';
  return /^-?\d+(?:\.\d+)?,-?\d+(?:\.\d+)?$/.test(candidate);
};
/**
 * Generate a stable identity for a hazard entry.
 * This intentionally ignores upstream alert ids so alert revisions
 * continue updating the same logical history row.
 * @param {string} locationKey - Stable location key
 * @param {string} hazardType - Hazard/event name
 * @param {string} source - Hazard source
 * @returns {string} Stable identity key
 */
const generateKey = (locationKey, hazardType, source) => `${locationKey}::${hazardType}::${source}`;
/**
 * Normalize a timestamp-like value to an ISO-8601 string.
 * Falls back when the value is nullish or does not parse to a valid date.
 * Fix: `new Date(null)` coerces null to the Unix epoch, so a missing
 * timestamp previously became "1970-01-01T00:00:00.000Z" instead of the
 * fallback; nullish input now uses the fallback explicitly.
 * @param {*} value - Candidate timestamp (ISO string, Date, epoch ms, ...)
 * @param {string} fallback - Value returned when `value` is unusable
 * @returns {string} ISO timestamp, or `fallback` as-is
 */
const normalizeTimestamp = (value, fallback) => {
  if (value == null) return fallback;
  const date = new Date(value);
  return Number.isNaN(date.getTime()) ? fallback : date.toISOString();
};
/**
 * Whether two history entries describe the same logical hazard:
 * identical formatted location, hazard type, and source.
 * NOTE(review): compares `location`, not `locationKey` — presumably
 * intentional so pre-migration rows still match; confirm with callers.
 * @param {Object} left - First entry
 * @param {Object} right - Second entry
 * @returns {boolean} True when both describe the same hazard
 */
const isSameLogicalHazard = (left, right) => {
  const sameLocation = left.location === right.location;
  const sameType = left.hazardType === right.hazardType;
  return sameLocation && sameType && left.source === right.source;
};
/**
 * Merge two history entries that describe the same logical hazard.
 * Keeps the earliest encounteredAt and the latest lastSeenAt, ORs the
 * ongoing flags, and prefers the incoming location identity only when it
 * is coordinate-based and the existing one is not.
 * @param {Object} existing - Entry already present in history
 * @param {Object} incoming - Newly observed entry for the same hazard
 * @returns {Object} Merged entry (existing fields are the base)
 */
const mergeEntries = (existing, incoming) => {
  // Normalize each timestamp, borrowing the counterpart's value when invalid.
  const firstSeenExisting = normalizeTimestamp(existing.encounteredAt, incoming.encounteredAt);
  const firstSeenIncoming = normalizeTimestamp(incoming.encounteredAt, existing.encounteredAt);
  const lastSeenExisting = normalizeTimestamp(existing.lastSeenAt, incoming.lastSeenAt);
  const lastSeenIncoming = normalizeTimestamp(incoming.lastSeenAt, existing.lastSeenAt);
  // Coordinate keys win over non-coordinate ones when only the incoming side has them.
  const preferIncomingLocation = isCoordinateLocationKey(incoming.locationKey)
    && !isCoordinateLocationKey(existing.locationKey);
  const incomingIsNewest = new Date(lastSeenIncoming) >= new Date(lastSeenExisting);
  // Track the upstream alert id of whichever side was seen most recently.
  const latestHazardId = incomingIsNewest
    ? (incoming.latestHazardId ?? existing.latestHazardId)
    : (existing.latestHazardId ?? incoming.latestHazardId);
  return {
    ...existing,
    location: preferIncomingLocation ? incoming.location : (existing.location || incoming.location),
    locationKey: preferIncomingLocation ? incoming.locationKey : (existing.locationKey || incoming.locationKey),
    key: preferIncomingLocation ? incoming.key : existing.key,
    encounteredAt: new Date(firstSeenExisting) <= new Date(firstSeenIncoming)
      ? firstSeenExisting
      : firstSeenIncoming,
    lastSeenAt: incomingIsNewest ? lastSeenIncoming : lastSeenExisting,
    ongoing: Boolean(existing.ongoing || incoming.ongoing),
    severity: incoming.severity || existing.severity,
    source: incoming.source || existing.source,
    latestHazardId,
  };
};
/**
 * Normalize a raw history array: drop entries missing a hazard type or
 * source, backfill locationKey/key/timestamps/latestHazardId, and collapse
 * duplicates that describe the same logical hazard into one merged row.
 * @param {Array} [history=[]] - Raw history entries from storage
 * @returns {Array} Deduplicated, normalized entries
 */
const normalizeHistory = (history = []) => {
  const deduplicated = [];
  for (const raw of history) {
    // Without a hazard type and source there is no stable identity to key on.
    if (!raw?.hazardType || !raw?.source) continue;
    const locationKey = raw.locationKey || raw.location;
    const entry = {
      ...raw,
      locationKey,
      key: generateKey(locationKey, raw.hazardType, raw.source),
      // Invalid/missing timestamps fall back to "now".
      encounteredAt: normalizeTimestamp(raw.encounteredAt, new Date().toISOString()),
      lastSeenAt: normalizeTimestamp(raw.lastSeenAt ?? raw.encounteredAt, new Date().toISOString()),
      latestHazardId: raw.latestHazardId ?? raw.hazardId ?? raw.id ?? raw.key,
    };
    // Match on the stable key OR the logical (location/type/source) identity
    // so pre-migration rows still collapse into their new-format twin.
    const matchIndex = deduplicated.findIndex(
      (candidate) => candidate.key === entry.key || isSameLogicalHazard(candidate, entry),
    );
    if (matchIndex === -1) {
      deduplicated.push(entry);
    } else {
      deduplicated[matchIndex] = mergeEntries(deduplicated[matchIndex], entry);
    }
  }
  return deduplicated;
};
/**
* Format location label from weather parameters
@ -92,23 +156,17 @@ const updateHistory = async (payload) => {
const { location, locationKey, hazards = [] } = payload;
// Load existing history
let history = await loadHistory();
let history = normalizeHistory(await loadHistory());
const now = new Date().toISOString();
// Use locationKey for matching if provided, fall back to location for backward compatibility
const matchKey = locationKey || location;
// Create a set of active hazard keys for this location
const activeKeys = new Set();
hazards.forEach((hazard) => {
const key = generateKey(matchKey, hazard.id);
activeKeys.add(key);
});
// Create a set of active hazard identities for this location
const activeKeys = new Set(hazards.map((hazard) => generateKey(matchKey, hazard.hazardType, hazard.source)));
// Mark previously ongoing hazards for this location as ended if no longer active
history = history.map((entry) => {
// Only process entries for this location
// Use locationKey for matching if available, fall back to location for backward compatibility
const entryMatchKey = entry.locationKey || entry.location;
if (entryMatchKey !== matchKey) return entry;
@ -125,7 +183,7 @@ const updateHistory = async (payload) => {
// Add or update active hazards
hazards.forEach((hazard) => {
const key = generateKey(matchKey, hazard.id);
const key = generateKey(matchKey, hazard.hazardType, hazard.source);
const existingIndex = history.findIndex((entry) => entry.key === key);
if (existingIndex >= 0) {
@ -136,6 +194,7 @@ const updateHistory = async (payload) => {
ongoing: true,
// Update severity if it changed
severity: hazard.severity || history[existingIndex].severity,
latestHazardId: hazard.id,
};
} else {
// Create new entry
@ -149,9 +208,12 @@ const updateHistory = async (payload) => {
ongoing: true,
severity: hazard.severity,
source: hazard.source,
latestHazardId: hazard.id,
});
}
});
history = normalizeHistory(history);
// Sort by lastSeenAt descending (newest first)
history.sort((a, b) => new Date(b.lastSeenAt) - new Date(a.lastSeenAt));
@ -172,7 +234,7 @@ const updateHistory = async (payload) => {
* @returns {Array} Current history entries
*/
/**
 * Load the hazard history, deduplicated and normalized.
 * Fix: removed the stale pre-refactor `const history = await loadHistory();`
 * line left interleaved with the normalized version (a duplicate `const`
 * declaration in the same scope is a SyntaxError).
 * @returns {Promise<Array>} History entries sorted by lastSeenAt, newest first
 */
const getHistory = async () => {
  const history = normalizeHistory(await loadHistory());
  // Ensure sorted by lastSeenAt descending (normalizeHistory returns a fresh
  // array, so the in-place sort does not mutate caller-owned data).
  return history.sort((a, b) => new Date(b.lastSeenAt) - new Date(a.lastSeenAt));
};