fix: average swap time in analytics page

Branch: main
Author: Kirubakaran
Date: 2025-09-16 02:03:40 +05:30
Commit: 219fbd92b4 (parent 7f0a93a80d)
4 changed files with 97 additions and 19 deletions

View File

@@ -108,6 +108,11 @@ def on_message_handler(station_id, topic, payload):
         'data': decoded_data
     }, room=station_id)
 
+    # If the message is EVENT or PERIODIC data, it could affect analytics.
+    if message_type in ['EVENTS', 'PERIODIC']:
+        print(f"Analytics-related data received ({message_type}). Notifying clients to refresh.")
+        socketio.emit('analytics_updated', room=station_id)
+
 # --- (WebSocket and API routes remain the same) ---
 @socketio.on('connect')
 def handle_connect():
@@ -367,7 +372,7 @@ def get_analytics_data():
             MqttLog.station_id == station_id,
             MqttLog.topic_type == 'EVENTS',
             MqttLog.timestamp.between(start_datetime, end_datetime)
-        ).all()
+        ).order_by(MqttLog.timestamp.asc()).all()  # sort chronologically so START events precede their END events
     except Exception as e:
         return jsonify({"message": f"Could not query event logs: {e}"}), 500
@@ -381,35 +386,83 @@ def get_analytics_data():
     except Exception as e:
         return jsonify({"message": f"Could not query periodic logs: {e}"}), 500
 
-    # 3. Process EVENT logs for swap KPIs and charts
-    total_swaps, completed_swaps, aborted_swaps = 0, 0, 0
-    completed_swap_times, daily_completed, daily_aborted, hourly_swaps, abort_reason_counts = [], {}, {}, [0] * 24, {}
-    slot_utilization_counts = {i: 0 for i in range(1, 10)}  # For the heatmap
+    # # 3. Process EVENT logs for swap KPIs and charts
+    # total_swaps, completed_swaps, aborted_swaps = 0, 0, 0
+    # completed_swap_times, daily_completed, daily_aborted, hourly_swaps, abort_reason_counts = [], {}, {}, [0] * 24, {}
+    # slot_utilization_counts = {i: 0 for i in range(1, 10)}  # For the heatmap
+    # for log in event_logs:
+    #     # (This processing logic is unchanged)
+    #     event_type = log.payload.get('eventType')
+    #     log_date = log.timestamp.date()
+    #     log_hour = log.timestamp.hour
+    #     if event_type == 'EVENT_SWAP_START':
+    #         total_swaps += 1
+    #         hourly_swaps[log_hour] += 1
+    #     elif event_type == 'EVENT_SWAP_ENDED':
+    #         completed_swaps += 1
+    #         daily_completed[log_date] = daily_completed.get(log_date, 0) + 1
+    #         swap_time = log.payload.get('eventData', {}).get('swapTime')
+    #         if swap_time is not None:
+    #             completed_swap_times.append(swap_time)
+    #     elif event_type == 'EVENT_SWAP_ABORTED':
+    #         aborted_swaps += 1
+    #         daily_aborted[log_date] = daily_aborted.get(log_date, 0) + 1
+    #         reason = log.payload.get('eventData', {}).get('swapAbortReason', 'ABORT_UNKNOWN')
+    #         abort_reason_counts[reason] = abort_reason_counts.get(reason, 0) + 1
+    #     elif event_type == 'EVENT_BATTERY_EXIT':
+    #         slot_id = log.payload.get('eventData', {}).get('slotId')
+    #         if slot_id and slot_id in slot_utilization_counts:
+    #             slot_utilization_counts[slot_id] += 1
+
+    # --- 3. REVISED: Process logs to calculate KPIs and chart data ---
+    swap_starts = {}  # start timestamps keyed by sessionId
+    completed_swap_times = []
+    total_swaps, completed_swaps, aborted_swaps = 0, 0, 0
+    daily_completed, daily_aborted, hourly_swaps, abort_reason_counts = {}, {}, [0] * 24, {}
+    slot_utilization_counts = {i: 0 for i in range(1, 10)}
+    print("\n--- STARTING SWAP ANALYSIS ---")  # debug logging
     for log in event_logs:
-        # (This processing logic is unchanged)
         event_type = log.payload.get('eventType')
+        session_id = log.payload.get('sessionId')
         log_date = log.timestamp.date()
         log_hour = log.timestamp.hour
         if event_type == 'EVENT_SWAP_START':
             total_swaps += 1
             hourly_swaps[log_hour] += 1
+            if session_id:
+                swap_starts[session_id] = log.timestamp  # remember when this session started
+                print(f"Found START for session '{session_id}' at {log.timestamp}")  # debug logging
         elif event_type == 'EVENT_SWAP_ENDED':
             completed_swaps += 1
             daily_completed[log_date] = daily_completed.get(log_date, 0) + 1
-            swap_time = log.payload.get('eventData', {}).get('swapTime')
-            if swap_time is not None:
-                completed_swap_times.append(swap_time)
+            if session_id and session_id in swap_starts:
+                # Calculate duration only if we have a matching start event
+                duration = (log.timestamp - swap_starts[session_id]).total_seconds()
+                completed_swap_times.append(duration)
+                print(f"Found MATCHING END for session '{session_id}'. Duration: {duration}s")  # debug logging
+                del swap_starts[session_id]  # remove to prevent reuse
+            else:
+                print(f"Found END event but could not find matching START for session '{session_id}'")  # debug logging
         elif event_type == 'EVENT_SWAP_ABORTED':
             aborted_swaps += 1
             daily_aborted[log_date] = daily_aborted.get(log_date, 0) + 1
             reason = log.payload.get('eventData', {}).get('swapAbortReason', 'ABORT_UNKNOWN')
             abort_reason_counts[reason] = abort_reason_counts.get(reason, 0) + 1
         elif event_type == 'EVENT_BATTERY_EXIT':
             slot_id = log.payload.get('eventData', {}).get('slotId')
             if slot_id and slot_id in slot_utilization_counts:
                 slot_utilization_counts[slot_id] += 1
+    print("--- ANALYSIS COMPLETE ---")  # debug logging
+    print(f"Calculated Durations: {completed_swap_times}")  # debug logging
 
     # --- NEW: 4. Calculate Station Uptime ---
     total_period_seconds = (end_datetime - start_datetime).total_seconds()
     total_downtime_seconds = 0
@@ -437,9 +490,28 @@ def get_analytics_data():
     station_uptime = 100 * (1 - (total_downtime_seconds / total_period_seconds))
     station_uptime = max(0, min(100, station_uptime))  # Ensure value is between 0 and 100
 
+    # if not periodic_logs:
+    #     total_downtime_seconds = total_period_seconds
+    # else:
+    #     first_gap = (periodic_logs[0].timestamp - start_datetime).total_seconds()
+    #     if first_gap > MAX_ONLINE_GAP_SECONDS:
+    #         total_downtime_seconds += first_gap
+    #     for i in range(1, len(periodic_logs)):
+    #         gap = (periodic_logs[i].timestamp - periodic_logs[i-1].timestamp).total_seconds()
+    #         if gap > MAX_ONLINE_GAP_SECONDS:
+    #             total_downtime_seconds += gap
+    #     last_gap = (end_datetime - periodic_logs[-1].timestamp).total_seconds()
+    #     if last_gap > MAX_ONLINE_GAP_SECONDS:
+    #         total_downtime_seconds += last_gap
+    # station_uptime = 100 * (1 - (total_downtime_seconds / total_period_seconds))
+    # station_uptime = max(0, min(100, station_uptime))
+
     # 5. Prepare final data structures (KPI section is now updated)
     avg_swap_time_seconds = sum(completed_swap_times) / len(completed_swap_times) if completed_swap_times else 0
+    # avg_swap_time_seconds = sum(completed_swap_times) / len(completed_swap_times) if completed_swap_times else None
     kpi_data = {
         "total_swaps": total_swaps, "completed_swaps": completed_swaps,
         "aborted_swaps": aborted_swaps, "avg_swap_time_seconds": avg_swap_time_seconds,

View File

@@ -205,7 +205,10 @@
         </div>
         <div class="tile">
             <p class="text-xs text-gray-400">Avg. Swap Time</p>
-            <p id="avg-swap-time" class="text-3xl font-extrabold">... <span class="text-lg font-bold text-gray-300">min</span></p>
+            <p id="avg-swap-time" class="text-3xl font-extrabold">
+                <span id="avg-swap-time-value">...</span>
+                <span class="text-lg font-bold text-gray-300">min</span>
+            </p>
         </div>
         <div class="tile">
             <p class="text-xs text-gray-400">Station Uptime</p>
@@ -225,17 +228,17 @@
             <canvas id="hourlyDistributionChart"></canvas>
         </div>
-        <div class="glass p-4 h-96">
-            <h3 class="font-extrabold">Swap Abort Reasons</h3>
-            <canvas id="abortReasonsChart"></canvas>
-        </div>
         <div class="glass p-4 h-96">
             <h3 class="font-extrabold mb-4">Slot Utilization Heatmap</h3>
             <div id="heatmap-grid" class="grid grid-cols-3 gap-4 h-[calc(100%-2rem)]">
             </div>
         </div>
+        <div class="glass p-4 h-96">
+            <h3 class="font-extrabold">Swap Abort Reasons</h3>
+            <canvas id="abortReasonsChart"></canvas>
+        </div>
         <!-- <div class="glass p-4 h-96 flex items-center justify-center">
             <p class="text-slate-500">Future Chart Area</p>
         </div> -->

View File

@@ -148,6 +148,8 @@ document.addEventListener('DOMContentLoaded', () => {
     const heatmapGridEl = document.getElementById('heatmap-grid');
+    const avgSwapTimeValueEl = document.getElementById('avg-swap-time-value');
+
     // status elements
     const stationNameEl = document.getElementById('station-name');
     const stationLocationEl = document.getElementById('station-location');
@@ -191,7 +193,7 @@ document.addEventListener('DOMContentLoaded', () => {
         successRateEl.textContent = '(...%)';
         abortedSwapsEl.textContent = '...';
         abortRateEl.textContent = '(...%)';
-        avgSwapTimeEl.innerHTML = '... <span class="text-lg font-bold text-gray-300">min</span>';
+        avgSwapTimeValueEl.textContent = '...';
         stationUptimeEl.textContent = '... %';
         return;
     }
@@ -210,8 +212,9 @@ document.addEventListener('DOMContentLoaded', () => {
     const abortRate = total > 0 ? ((aborted / total) * 100).toFixed(1) : 0;
     abortRateEl.textContent = `(${abortRate}%)`;
-    const avgTimeInMinutes = data.avg_swap_time_seconds ? (data.avg_swap_time_seconds / 60).toFixed(1) : '—';
-    avgSwapTimeEl.innerHTML = `${avgTimeInMinutes} <span class="text-lg font-bold text-gray-300">min</span>`;
+    const avgTimeInMinutes = data.avg_swap_time_seconds != null ? (data.avg_swap_time_seconds / 60).toFixed(1) : '—';
+    avgSwapTimeValueEl.textContent = avgTimeInMinutes;
+
     stationUptimeEl.textContent = `${data.station_uptime ?? '...'} %`;
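
A note on the != null check above: the backend change keeps "else 0" and only adds a commented-out "else None" alternative for the average. The sketch below is a hypothetical, minimal Flask route (the path and function name are made up, not the project's real endpoint) showing how that None variant would reach the browser as JSON null, which the new != null check turns into the placeholder dash.

# A hedged sketch, not the project's actual route: shows how the commented-out
# "else None" variant would serialize through Flask's jsonify.
from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/demo-avg-swap")  # hypothetical path for illustration only
def demo_avg_swap():
    completed_swap_times = []  # pretend no swaps completed in the selected range
    avg = (sum(completed_swap_times) / len(completed_swap_times)
           if completed_swap_times else None)
    # None becomes JSON null, so the frontend's != null guard shows the placeholder
    return jsonify({"avg_swap_time_seconds": avg})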