fix hazard history duplication for long multi-day hazard events
This commit is contained in:
parent
dac15405fa
commit
9aae190f74
1 changed files with 78 additions and 16 deletions
|
|
@ -50,13 +50,77 @@ const saveHistory = async (history) => {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
 * Check whether a location key looks like a decimal "lat,lon" coordinate pair.
 * @param {string|null|undefined} value - Candidate location key
 * @returns {boolean} True when the value matches "number,number"
 */
const isCoordinateLocationKey = (value) => {
  const candidate = value ?? '';
  return /^-?\d+(?:\.\d+)?,-?\d+(?:\.\d+)?$/.test(candidate);
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generate a stable key for a hazard entry
|
* Generate a stable identity for a hazard entry.
|
||||||
* @param {string} location - Formatted location string
|
* This intentionally ignores upstream alert ids so alert revisions
|
||||||
* @param {string} hazardId - Hazard ID
|
* continue updating the same logical history row.
|
||||||
* @returns {string} Stable key
|
* @param {string} locationKey - Stable location key
|
||||||
|
* @param {string} hazardType - Hazard/event name
|
||||||
|
* @param {string} source - Hazard source
|
||||||
|
* @returns {string} Stable identity key
|
||||||
*/
|
*/
|
||||||
const generateKey = (location, hazardId) => `${location}::${hazardId}`;
|
/**
 * Generate a stable identity for a hazard entry.
 *
 * Upstream alert ids are deliberately excluded so revised alerts keep
 * updating the same logical history row.
 * @param {string} locationKey - Stable location key
 * @param {string} hazardType - Hazard/event name
 * @param {string} source - Hazard source
 * @returns {string} Stable identity key
 */
const generateKey = (locationKey, hazardType, source) => [locationKey, hazardType, source].join('::');
|
||||||
|
|
||||||
|
/**
 * Coerce a timestamp-like value to an ISO-8601 string.
 *
 * Falls back when the value is missing or unparseable. `new Date(null)`
 * silently yields the Unix epoch rather than an invalid date, so nullish
 * values are rejected explicitly before parsing.
 * @param {string|number|Date|null|undefined} value - Candidate timestamp
 * @param {string} fallback - Value returned when the candidate is unusable
 * @returns {string} ISO-8601 timestamp (or the fallback as given)
 */
const normalizeTimestamp = (value, fallback) => {
  if (value == null) return fallback;
  const date = new Date(value);
  return Number.isNaN(date.getTime()) ? fallback : date.toISOString();
};
|
||||||
|
|
||||||
|
/**
 * Two history entries describe the same logical hazard when their
 * location, hazard type, and source all match.
 * @param {object} left - History entry
 * @param {object} right - History entry
 * @returns {boolean} True when the entries are logically identical
 */
const isSameLogicalHazard = (left, right) =>
  ['location', 'hazardType', 'source'].every((field) => left[field] === right[field]);
|
||||||
|
|
||||||
|
/**
 * Merge an incoming history entry into an existing entry for the same
 * logical hazard.
 *
 * Keeps the earliest encounteredAt and the latest lastSeenAt, prefers the
 * incoming location fields when they upgrade a formatted-name key to a
 * coordinate key, and carries the hazard id of whichever side was seen
 * most recently.
 * @param {object} existing - Entry already present in history
 * @param {object} incoming - New entry for the same logical hazard
 * @returns {object} Merged entry
 */
const mergeEntries = (existing, incoming) => {
  // Normalize each side's timestamps, borrowing the other side's raw value as fallback.
  const encounteredA = normalizeTimestamp(existing.encounteredAt, incoming.encounteredAt);
  const encounteredB = normalizeTimestamp(incoming.encounteredAt, existing.encounteredAt);
  const lastSeenA = normalizeTimestamp(existing.lastSeenAt, incoming.lastSeenAt);
  const lastSeenB = normalizeTimestamp(incoming.lastSeenAt, existing.lastSeenAt);

  // A coordinate-style key is more stable than a formatted place name.
  const upgradeLocation = isCoordinateLocationKey(incoming.locationKey)
    && !isCoordinateLocationKey(existing.locationKey);

  // Ties (and mutually-invalid timestamps) deliberately favor the incoming id.
  const latestHazardId = new Date(lastSeenB) >= new Date(lastSeenA)
    ? (incoming.latestHazardId ?? existing.latestHazardId)
    : (existing.latestHazardId ?? incoming.latestHazardId);

  return {
    ...existing,
    location: upgradeLocation ? incoming.location : (existing.location || incoming.location),
    locationKey: upgradeLocation ? incoming.locationKey : (existing.locationKey || incoming.locationKey),
    key: upgradeLocation ? incoming.key : existing.key,
    encounteredAt: new Date(encounteredA) <= new Date(encounteredB) ? encounteredA : encounteredB,
    lastSeenAt: new Date(lastSeenA) >= new Date(lastSeenB) ? lastSeenA : lastSeenB,
    ongoing: Boolean(existing.ongoing || incoming.ongoing),
    severity: incoming.severity || existing.severity,
    source: incoming.source || existing.source,
    latestHazardId,
  };
};
|
||||||
|
|
||||||
|
/**
 * Normalize raw history entries and collapse duplicates that describe the
 * same logical hazard (same location key, hazard type, and source).
 *
 * Entries lacking a hazardType or source are dropped. Legacy rows get a
 * stable key, normalized timestamps, and a latestHazardId backfilled from
 * older id fields.
 * @param {Array<object>} history - Raw entries as loaded from storage
 * @returns {Array<object>} Deduplicated, normalized entries
 */
const normalizeHistory = (history = []) => {
  const normalized = [];
  // Capture one fallback timestamp up front so every entry backfilled in
  // this pass agrees, instead of drifting by a few ms per iteration.
  const now = new Date().toISOString();

  for (const rawEntry of history) {
    if (!rawEntry?.hazardType || !rawEntry?.source) continue;

    // Legacy rows may predate locationKey; fall back to the formatted location.
    const locationKey = rawEntry.locationKey || rawEntry.location;
    const entry = {
      ...rawEntry,
      locationKey,
      key: generateKey(locationKey, rawEntry.hazardType, rawEntry.source),
      encounteredAt: normalizeTimestamp(rawEntry.encounteredAt, now),
      lastSeenAt: normalizeTimestamp(rawEntry.lastSeenAt ?? rawEntry.encounteredAt, now),
      // Backfill from older id fields so revised alerts keep matching this row.
      latestHazardId: rawEntry.latestHazardId ?? rawEntry.hazardId ?? rawEntry.id ?? rawEntry.key,
    };

    const existingIndex = normalized.findIndex(
      (candidate) => candidate.key === entry.key || isSameLogicalHazard(candidate, entry),
    );
    if (existingIndex >= 0) {
      normalized[existingIndex] = mergeEntries(normalized[existingIndex], entry);
    } else {
      normalized.push(entry);
    }
  }

  return normalized;
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Format location label from weather parameters
|
* Format location label from weather parameters
|
||||||
|
|
@ -92,23 +156,17 @@ const updateHistory = async (payload) => {
|
||||||
const { location, locationKey, hazards = [] } = payload;
|
const { location, locationKey, hazards = [] } = payload;
|
||||||
|
|
||||||
// Load existing history
|
// Load existing history
|
||||||
let history = await loadHistory();
|
let history = normalizeHistory(await loadHistory());
|
||||||
const now = new Date().toISOString();
|
const now = new Date().toISOString();
|
||||||
|
|
||||||
// Use locationKey for matching if provided, fall back to location for backward compatibility
|
// Use locationKey for matching if provided, fall back to location for backward compatibility
|
||||||
const matchKey = locationKey || location;
|
const matchKey = locationKey || location;
|
||||||
|
|
||||||
// Create a set of active hazard keys for this location
|
// Create a set of active hazard identities for this location
|
||||||
const activeKeys = new Set();
|
const activeKeys = new Set(hazards.map((hazard) => generateKey(matchKey, hazard.hazardType, hazard.source)));
|
||||||
hazards.forEach((hazard) => {
|
|
||||||
const key = generateKey(matchKey, hazard.id);
|
|
||||||
activeKeys.add(key);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Mark previously ongoing hazards for this location as ended if no longer active
|
// Mark previously ongoing hazards for this location as ended if no longer active
|
||||||
history = history.map((entry) => {
|
history = history.map((entry) => {
|
||||||
// Only process entries for this location
|
|
||||||
// Use locationKey for matching if available, fall back to location for backward compatibility
|
|
||||||
const entryMatchKey = entry.locationKey || entry.location;
|
const entryMatchKey = entry.locationKey || entry.location;
|
||||||
if (entryMatchKey !== matchKey) return entry;
|
if (entryMatchKey !== matchKey) return entry;
|
||||||
|
|
||||||
|
|
@ -125,7 +183,7 @@ const updateHistory = async (payload) => {
|
||||||
|
|
||||||
// Add or update active hazards
|
// Add or update active hazards
|
||||||
hazards.forEach((hazard) => {
|
hazards.forEach((hazard) => {
|
||||||
const key = generateKey(matchKey, hazard.id);
|
const key = generateKey(matchKey, hazard.hazardType, hazard.source);
|
||||||
const existingIndex = history.findIndex((entry) => entry.key === key);
|
const existingIndex = history.findIndex((entry) => entry.key === key);
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
|
|
@ -136,6 +194,7 @@ const updateHistory = async (payload) => {
|
||||||
ongoing: true,
|
ongoing: true,
|
||||||
// Update severity if it changed
|
// Update severity if it changed
|
||||||
severity: hazard.severity || history[existingIndex].severity,
|
severity: hazard.severity || history[existingIndex].severity,
|
||||||
|
latestHazardId: hazard.id,
|
||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
// Create new entry
|
// Create new entry
|
||||||
|
|
@ -149,9 +208,12 @@ const updateHistory = async (payload) => {
|
||||||
ongoing: true,
|
ongoing: true,
|
||||||
severity: hazard.severity,
|
severity: hazard.severity,
|
||||||
source: hazard.source,
|
source: hazard.source,
|
||||||
|
latestHazardId: hazard.id,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
history = normalizeHistory(history);
|
||||||
|
|
||||||
// Sort by lastSeenAt descending (newest first)
|
// Sort by lastSeenAt descending (newest first)
|
||||||
history.sort((a, b) => new Date(b.lastSeenAt) - new Date(a.lastSeenAt));
|
history.sort((a, b) => new Date(b.lastSeenAt) - new Date(a.lastSeenAt));
|
||||||
|
|
@ -172,7 +234,7 @@ const updateHistory = async (payload) => {
|
||||||
* @returns {Array} Current history entries
|
* @returns {Array} Current history entries
|
||||||
*/
|
*/
|
||||||
const getHistory = async () => {
|
const getHistory = async () => {
|
||||||
const history = await loadHistory();
|
const history = normalizeHistory(await loadHistory());
|
||||||
// Ensure sorted by lastSeenAt descending
|
// Ensure sorted by lastSeenAt descending
|
||||||
return history.sort((a, b) => new Date(b.lastSeenAt) - new Date(a.lastSeenAt));
|
return history.sort((a, b) => new Date(b.lastSeenAt) - new Date(a.lastSeenAt));
|
||||||
};
|
};
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue