docs(deploy): create detailed deployment guide for AWS
Adds a DEPLOYMENT.md file with comprehensive instructions for setting up prerequisites (Docker, an RDS database) and deploying the application with Docker Compose. Also includes an .env.example file to guide configuration of the environment variables required in production.

parent fa41798552
commit 7ad072ff6e
@ -0,0 +1,16 @@
# Git files
.git
.gitignore

# Python specific files
__pycache__/
*.pyc
venv/

# Node.js dependencies
frontend/node_modules/

# Environment files and IDE settings
.env
.vscode/
@ -0,0 +1,39 @@
# This is an example configuration file.
# Copy this to a new file named '.env' and fill in the production values before deploying.

# --- Database Configuration ---
# The full connection string for your production PostgreSQL database on AWS RDS or another service.
DATABASE_URL=postgresql://YOUR_DATABASE_USER:YOUR_DATABASE_PASSWORD@YOUR_DATABASE_HOST:5432/YOUR_DATABASE_NAME

# --- Application Security ---
# A long, random, and secret string used for signing sessions.
# You can generate one using: openssl rand -hex 32
SECRET_KEY=your_super_secret_and_random_string_here

# --- CORS Configuration ---
# The full public URL of the server where this application will be hosted.
# IMPORTANT: Use http or https as appropriate. Do NOT include a trailing slash.
# Example for an IP: http://54.123.45.67
# Example for a domain: https://www.swapstation-analytics.com
CORS_ALLOWED_ORIGIN=http://YOUR_SERVER_PUBLIC_IP_OR_DOMAIN


# Example:

# --- Flask Application Settings ---
# This is a secret key used by Flask for session management.
# You can generate a new one with: python -c 'import os; print(os.urandom(24).hex())'
SECRET_KEY="80473e17c5707e19252ef3736fba32805be21a9b3e914190"

# --- PostgreSQL Database Connection ---
# Replace with your actual database credentials.
# Format: postgresql://<user>:<password>@<host>:<port>/<dbname>
DATABASE_URL="postgresql://swap_app_user:2004@localhost:5432/swap_station_db"
# DATABASE_URL="postgresql://swap_app_user:Vec%40123@localhost:5432/swap_station_db"

# --- CORS Configuration ---
# The public URL or IP address where this application will be hosted.
CORS_ALLOWED_ORIGIN=http://54.123.45.67

# this is an example for a domain
CORS_ALLOWED_ORIGIN=https://www.swapstation-analytics.com
@ -1,59 +1,52 @@
-# Python
-__pycache__/
-*.pyc
-*.pyo
-*.pyd
-.env
-*.env
-*.sqlite3
-*.db
-instance/
-
-# Flask
-*.log
-*.pot
-*.mo
-
-# VS Code
-*.code-workspace
-.vscode/
-
-# Node.js
-node_modules/
-
-package-lock.json
-
-# Frontend build
-/dist/
-/build/
-*.map
-
-# OS
-.DS_Store
-Thumbs.db
-
-# Jupyter
-.ipynb_checkpoints/
-
-# Misc
-*.bak
-*.swp
-*.swo
-
-# Protobuf
-*.pb2.py
-*.pb2.pyi
-
-# Tailwind
-css/tailwind.css
-
-# Logs
-*.log
-logs/
-
-# Others
-*.coverage
-.coverage
-
-# Ignore test output
-*.out
-*.tmp
+#--- Secrets and Environment ---
+# Never commit environment files. They contain secrets and local configuration.
+# The deployment server will have its own .env file as per the DEPLOYMENT.md guide.
+.env
+*.env
+
+# --- Python Dependencies & Virtual Environments ---
+# Ignore the virtual environment folder.
+venv/
+__pycache__/
+*.pyc
+*.pyo
+*.pyd
+
+# --- Frontend Dependencies ---
+# Ignore Node.js dependency modules. These should be installed via 'npm install'.
+node_modules/
+
+# Note: It is often best practice to COMMIT 'package-lock.json' to ensure all
+# developers and build environments use the exact same dependency versions.
+# If you want to include it, remove the line below.
+package-lock.json
+
+# --- Editor & OS Specific ---
+# Ignore IDE and OS-specific files.
+.vscode/
+*.code-workspace
+.DS_Store
+Thumbs.db
+
+# --- Logs and Temporary Files ---
+# Ignore log files and other temporary artifacts.
+*.log
+logs/
+*.bak
+*.swp
+*.swo
+*.tmp
+*.coverage
+
+# --- Database Files ---
+# Ignore local database files.
+*.sqlite3
+*.db
+instance/
@ -0,0 +1,118 @@
Swap Station Web Application - AWS Deployment Guide

This document provides detailed instructions for deploying the Swap Station web application to a Linux server on AWS.

The application is containerized using Docker and managed with Docker Compose for ease of deployment and configuration.

0. Prerequisite: Setting Up the Production Database (AWS RDS)

Before deploying the application, a PostgreSQL database must be created. The recommended service for this is Amazon RDS. The person deploying the application will need to perform these steps in the AWS Management Console.

Steps to Create the Database:

Launch an RDS Instance:

- Navigate to the RDS service in the AWS Console.
- Choose to "Create database" with the "Standard create" option.
- Select PostgreSQL as the engine type. Choose a recent version (e.g., PostgreSQL 15 or higher).
- Under "Templates", select the "Free tier" option for testing, or a suitably sized production instance.
- Under "Settings", create a DB instance identifier (e.g., swapstation-db), a Master username, and a Master password. Securely store these credentials.

Configure Networking & Security:

- Ensure the RDS instance is launched in the same VPC as your EC2 instance (the server that will run the application).
- In the "Connectivity" section, find the Security Group settings. Create a new security group for the database (e.g., rds-sg).
- After the database is created, you must configure this security group to allow inbound traffic from your application server. Go to the security group's "Inbound rules" and add a rule that allows traffic on the PostgreSQL port (5432) from the security group of your EC2 instance. This is a critical step to allow the application to connect to the database. A CLI sketch of the same rule follows this list.
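The same inbound rule can also be added from the AWS CLI instead of the console. This is only a sketch of the documented console step; the two group IDs are placeholders you would look up for rds-sg and for your EC2 instance's security group:

aws ec2 authorize-security-group-ingress \
  --group-id <rds-security-group-id> \
  --protocol tcp \
  --port 5432 \
  --source-group <ec2-security-group-id>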
Get the Connection Details:

- Once the database is created and available, select it from the RDS dashboard.
- In the "Connectivity & security" tab, you will find the Endpoint (this is the database host) and the Port.

Constructing the DATABASE_URL:

Use the details from the steps above to build the full connection string. The format is:

postgresql://<user>:<password>@<host>:<port>/<dbname>

- <user>: The Master username you created.
- <password>: The Master password you created.
- <host>: The Endpoint from the RDS console.
- <port>: The Port (usually 5432).
- <dbname>: The initial database name you provided during setup (often postgres if not specified).

This full string is the value you will use for DATABASE_URL in the .env file; a worked example follows.
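For instance, with a master username admin, a master password MySecretPass, the endpoint copied from the RDS console, and the default postgres database, the finished value would look like the line below (every value shown is an illustrative placeholder, not a real credential). If the password contains special characters such as @, they must be percent-encoded, as the project's old backend/.env did with %40:

DATABASE_URL=postgresql://admin:MySecretPass@swapstation-db.abc123xyz456.ap-south-1.rds.amazonaws.com:5432/postgres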
1. Developer Preparation (Completed)

- Backend (main.py): The application is configured to read the DATABASE_URL from an environment variable.
- Frontend (JavaScript files): All API endpoints are relative, making them independent of the server's IP or domain.
- Dockerization: The project includes a Dockerfile and docker-compose.yml for easy deployment.

2. Server Prerequisites

The deployer must have an AWS EC2 instance (Ubuntu 22.04 recommended) with the following installed:

- Git
- Docker
- Docker Compose

Installation on Ubuntu:

sudo apt-get update
sudo apt-get install -y git docker.io docker-compose
sudo systemctl start docker
sudo systemctl enable docker
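Before continuing, confirm that both tools are on the PATH. The last command is optional and is a standard Docker post-install step rather than anything specific to this project; it lets you run Docker without sudo after logging out and back in:

docker --version
docker-compose --version
sudo usermod -aG docker $USER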
3. Deployment Steps

Step 3.1: Clone the Project

git clone <your-repository-url>
cd SWAPSTATION_WEBAPP

Step 3.2: Create the Production Configuration File

Copy the example file to a new .env file in the project's root directory:

cp .env.example .env

Open .env with a text editor (e.g., nano .env) and fill in the values:

- DATABASE_URL: The full connection string you constructed in Section 0.
- SECRET_KEY: A strong, randomly generated secret key (see below for one way to generate it).
- CORS_ALLOWED_ORIGIN: The public URL of the EC2 instance, e.g. http://<YOUR_SERVER_IP_OR_DNS>, with no trailing slash.
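The .env.example file already suggests openssl rand -hex 32 for the secret key; running it on the server prints a value you can paste into SECRET_KEY:

openssl rand -hex 32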
Step 3.3: Build and Run the Application

# Build the Docker image
docker-compose build

# Run the application in the background
docker-compose up -d

Step 3.4: Verify and Access the Application

- Check that the container is running: docker ps
- Open a browser and navigate to the public IP address of your server: http://<YOUR_SERVER_PUBLIC_IP_OR_DNS>
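A couple of optional sanity checks from the server itself confirm that Nginx is answering on port 80 and that the API is reachable through the same origin (these commands assume the container name from docker-compose.yml):

docker ps --filter name=swapstation_app
curl -I http://localhost/
curl http://localhost/api/stations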
4. Managing the Application

- Check Logs: docker-compose logs -f
- Update Application: git pull, then docker-compose build, then docker-compose up -d (see the combined sequence below)
- Stop Application: docker-compose down
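For convenience, the update path can be run as a single sequence from the project directory; these are just the documented commands chained together, with a short log check at the end:

git pull
docker-compose build
docker-compose up -d
docker-compose logs -f --tail=50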
@ -0,0 +1,43 @@
# ===== Stage 1: Build the Python Backend =====
FROM python:3.10-slim as python-builder

# Set the working directory for the backend code
WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends build-essential

# Copy ONLY the requirements file first to leverage Docker's layer caching
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Now copy the entire backend application code
# The source is 'backend/' and the destination is the current WORKDIR ('/app')
COPY backend/ .

# ===== Stage 2: Final Production Image =====
FROM nginx:stable-alpine

# Remove default Nginx config and copy our custom one
RUN rm /etc/nginx/conf.d/default.conf
COPY nginx.conf /etc/nginx/nginx.conf

# Copy the installed Python environment from the builder stage
COPY --from=python-builder /usr/local/lib/python3.10/site-packages /usr/local/lib/python3.10/site-packages
COPY --from=python-builder /usr/local/bin /usr/local/bin

# Copy the Python application code from the builder stage
COPY --from=python-builder /app /app
WORKDIR /app

# Copy the static frontend files into the Nginx public directory
# This is the key change to match your structure
COPY frontend/. /usr/share/nginx/html

# Expose the port Nginx will listen on
EXPOSE 80

# The command to start both Gunicorn (for Python) and Nginx (for serving files)
# Gunicorn will find 'main.py' inside the '/app' WORKDIR
CMD ["sh", "-c", "gunicorn --worker-class eventlet -w 1 --bind 0.0.0.0:5000 main:app & nginx -g 'daemon off;'"]
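Docker Compose is the documented way to build and run this image (see DEPLOYMENT.md and docker-compose.yml later in this diff), but as a quick local sanity check the multi-stage build can also be exercised on its own; the image tag here is arbitrary:

docker build -t swapstation-test .
docker run --rm --env-file .env -p 80:80 swapstation-test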
backend/.env
@ -1,16 +0,0 @@
# --- Flask Application Settings ---
# This is a secret key used by Flask for session management.
# You can generate a new one with: python -c 'import os; print(os.urandom(24).hex())'
SECRET_KEY="80473e17c5707e19252ef3736fba32805be21a9b3e914190"

# --- PostgreSQL Database Connection ---
# Replace with your actual database credentials.
# Format: postgresql://<user>:<password>@<host>:<port>/<dbname>
# DATABASE_URL="postgresql://swap_app_user:2004@localhost:5432/swap_station_db"
DATABASE_URL="postgresql://swap_app_user:Vec%40123@localhost:5432/swap_station_db"

# --- MQTT Broker Connection ---
MQTT_BROKER="mqtt-dev.upgrid.in"
MQTT_PORT="1883"
MQTT_USER="guest"
MQTT_PASSWORD="password"
@ -1,28 +0,0 @@
# Use official Python image
FROM python:3.11-slim

# Set work directory
WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Copy backend code
COPY . .

# Expose Flask port
EXPOSE 5000

# Set environment variables
ENV FLASK_APP=main.py
ENV FLASK_ENV=production

# Start the Flask app (use gunicorn for production)
CMD ["gunicorn", "main:app", "--bind", "0.0.0.0:5000", "--worker-class", "eventlet", "--workers", "1"]
Binary files not shown.
backend/main.py
@ -5,12 +5,12 @@ import json
 import csv
 import io
 import time
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from flask import Flask, jsonify, request, Response
 from flask_socketio import SocketIO, join_room
 from flask_cors import CORS
 from dotenv import load_dotenv
-from sqlalchemy import desc, func, case
+from sqlalchemy import desc, func, case, String, Integer
 
 # Import your custom core modules and the new models
 from core.mqtt_client import MqttClient
@ -25,6 +25,12 @@ from proto.vec_payload_chgSt_pb2 import (
 # --- Load Environment Variables ---
 load_dotenv()
 
+# Load the allowed origin for CORS from an environment variable.
+# Default to a local development URL if the variable is not set.
+ALLOWED_ORIGIN = os.getenv("CORS_ALLOWED_ORIGIN", "http://127.0.0.1:5500")
+
+print(f"--- INFO: Configuring CORS to allow requests from: {ALLOWED_ORIGIN} ---")
+
 # --- Pre-startup Check for Essential Configuration ---
 DATABASE_URL = os.getenv("DATABASE_URL")
 if not DATABASE_URL:
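For a quick check of this change outside Docker, both variables can be supplied on the command line when starting the backend; the origin and connection string below are placeholders, not real values:

CORS_ALLOWED_ORIGIN=http://127.0.0.1:5500 DATABASE_URL=postgresql://user:pass@localhost:5432/swap_station_db python main.py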
@ -35,15 +41,10 @@ if not DATABASE_URL:
 app = Flask(__name__)
 # CORS(app)
 
-# CORS(app, resources={r"/api/*": {"origins": "http://127.0.0.1:5500"}}, supports_credentials=True)
+# CORS(app, resources={r"/api/*": {"origins": ["http://10.10.1.169:5500","http://127.0.0.1:5500"]}}, supports_credentials=True, expose_headers='Content-Disposition')
 
-# CORS(app, resources={r"/api/*": {"origins": "http://127.0.0.1:5500"}}, supports_credentials=True, expose_headers='Content-Disposition')
-
-CORS(app, resources={r"/api/*": {"origins": ["http://10.10.2.47:5501","http://127.0.0.1:5501"]}}, supports_credentials=True, expose_headers='Content-Disposition')
-
-# CORS(app, resources={r"/api/*": {"origins": "http://localhost:5173"}}) , "http://127.0.0.1:5500"
-# This tells Flask: "For any route starting with /api/, allow requests
-# from the frontend running on http://localhost:5173".
+CORS(app, resources={r"/api/*": {"origins": [ALLOWED_ORIGIN]}}, supports_credentials=True, expose_headers='Content-Disposition')
 
 # ADD THESE LINES FOR FLASK-LOGIN
 login_manager = LoginManager()
@ -283,25 +284,25 @@ def get_stations():
 @app.route('/api/stations/daily-stats', methods=['GET'])
 def get_all_station_stats():
     """
-    Calculates the swap statistics for today for all stations.
+    Calculates the swap statistics for the last 24 hours for all stations.
     """
     try:
-        # --- CHANGE THESE TWO LINES ---
-        today_start = datetime.combine(datetime.utcnow().date(), time.min)
-        today_end = datetime.combine(datetime.utcnow().date(), time.max)
+        # --- THIS IS THE FIX ---
+        # Calculate a rolling 24-hour window instead of a fixed "today"
+        now_utc = datetime.now(timezone.utc)
+        start_of_period = now_utc - timedelta(hours=24)
 
-        # This is an efficient query that groups by station_id and counts events in one go
+        # The query now uses the correct .astext syntax for JSONB fields
         stats = db.session.query(
             MqttLog.station_id,
-            func.count(case((MqttLog.payload['eventType'] == 'EVENT_SWAP_START', 1))).label('total_starts'),
-            func.count(case((MqttLog.payload['eventType'] == 'EVENT_SWAP_ENDED', 1))).label('completed'),
-            func.count(case((MqttLog.payload['eventType'] == 'EVENT_SWAP_ABORTED', 1))).label('aborted')
+            func.count(case((MqttLog.payload['eventType'].astext == 'EVENT_SWAP_START', 1))).label('total_starts'),
+            func.count(case((MqttLog.payload['eventType'].astext == 'EVENT_SWAP_ENDED', 1))).label('completed'),
+            func.count(case((MqttLog.payload['eventType'].astext == 'EVENT_SWAP_ABORTED', 1))).label('aborted')
         ).filter(
             MqttLog.topic_type == 'EVENTS',
-            MqttLog.timestamp.between(today_start, today_end)
+            MqttLog.timestamp.between(start_of_period, now_utc)  # Use the new 24-hour window
         ).group_by(MqttLog.station_id).all()
 
-        # Convert the list of tuples into a dictionary for easy lookup
         stats_dict = {
             station_id: {
                 "total_starts": total_starts,
@ -363,155 +364,8 @@ ABORT_REASON_MAP = {
     "ABORT_INVALID_BATTERY": "Invalid Battery"
 }
 
 
 #--- Analytics Route ---
     [-147 lines: the old, fully commented-out prototype of the /api/analytics route was deleted — parameter validation, EVENT and PERIODIC log queries, the swap/abort/slot-utilization tallies, the gap-based uptime calculation, and the final JSON assembly]
 @app.route('/api/analytics', methods=['GET'])
 def get_analytics_data():
     # 1. Get and validate request parameters
@ -652,6 +506,52 @@ def get_analytics_data():
         "slot_utilization": slot_utilization_data
     })
 
 
+@app.route('/api/uptime/<string:station_id>', methods=['GET'])
+def get_station_uptime(station_id):
+    """
+    A lightweight endpoint to calculate only the station uptime for the last 24 hours.
+    """
+    try:
+        end_datetime = datetime.now(timezone.utc)
+        start_datetime = end_datetime - timedelta(hours=24)
+
+        periodic_logs = MqttLog.query.filter(
+            MqttLog.station_id == station_id,
+            MqttLog.topic_type == 'PERIODIC',
+            MqttLog.timestamp.between(start_datetime, end_datetime)
+        ).order_by(MqttLog.timestamp.asc()).all()
+
+        total_period_seconds = (end_datetime - start_datetime).total_seconds()
+        total_downtime_seconds = 0
+        MAX_ONLINE_GAP_SECONDS = 30
+
+        if not periodic_logs:
+            total_downtime_seconds = total_period_seconds
+        else:
+            first_gap = (periodic_logs[0].timestamp.replace(tzinfo=timezone.utc) - start_datetime).total_seconds()
+            if first_gap > MAX_ONLINE_GAP_SECONDS:
+                total_downtime_seconds += first_gap
+
+            for i in range(1, len(periodic_logs)):
+                gap = (periodic_logs[i].timestamp - periodic_logs[i-1].timestamp).total_seconds()
+                if gap > MAX_ONLINE_GAP_SECONDS:
+                    total_downtime_seconds += gap
+
+            last_gap = (end_datetime - periodic_logs[-1].timestamp.replace(tzinfo=timezone.utc)).total_seconds()
+            if last_gap > MAX_ONLINE_GAP_SECONDS:
+                total_downtime_seconds += last_gap
+
+        uptime_percentage = 100 * (1 - (total_downtime_seconds / total_period_seconds))
+        uptime_percentage = max(0, min(100, uptime_percentage))
+
+        return jsonify({"uptime": round(uptime_percentage, 2)})
+
+    except Exception as e:
+        print(f"Error in uptime calculation for {station_id}: {e}")
+        return jsonify({"uptime": "Error"}), 500
+
+
 # --- CSV Export route (UPDATED) ---
 def _format_periodic_row(payload, num_slots=9):
     """
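Once deployed, the new endpoint can be exercised directly with curl; the station ID is a placeholder, and a successful response is the single rounded percentage produced by the jsonify call above:

curl http://<YOUR_SERVER_PUBLIC_IP_OR_DNS>/api/uptime/<station_id>
# e.g. {"uptime": 97.42}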
@ -675,12 +575,6 @@ def _format_periodic_row(payload, num_slots=9):
     for i in range(1, num_slots + 1):
         slot = slot_map.get(i)
         if slot:
-            # Convert boolean values to readable text
-            # door_status_text = "OPEN" if slot.get("doorStatus", 0) == 1 else "CLOSED"
-            # door_lock_status_text = "UNLOCKED" if slot.get("doorLockStatus", 0) == 1 else "LOCKED"
-            # battery_present_text = "YES" if slot.get("batteryPresent", 0) == 1 else "NO"
-            # charger_present_text = "YES" if slot.get("chargerPresent", 0) == 1 else "NO"
-
             row.extend([
                 slot.get('batteryIdentification', ''),
                 slot.get("batteryPresent", 0),
@ -877,33 +771,6 @@ def start_single_mqtt_client(station):
     mqtt_clients[station.station_id] = client
 
 # --- Main Application Logic ---
     [-27 lines: an older, fully commented-out copy of start_mqtt_clients() was deleted]
 def start_mqtt_clients():
     """
     Initializes and starts an MQTT client for each station found in the database
@ -953,5 +820,5 @@ if __name__ == '__main__':
     mqtt_thread = threading.Thread(target=start_mqtt_clients, daemon=True)
     mqtt_thread.start()
 
-    print(f"Starting Flask-SocketIO server on http://10.10.2.47:5000")
-    socketio.run(app, host='10.10.2.47', port=5000)
+    print(f"Starting Flask-SocketIO server on http://0.0.0.0:5000")
+    socketio.run(app, host='0.0.0.0', port=5000)
Binary files not shown.
@ -1,10 +0,0 @@
Flask
Flask-SocketIO
Flask-SQLAlchemy
Flask-Cors
Flask-Login
psycopg2-binary
paho-mqtt
protobuf
python-dotenv
Werkzeug
backend/test.py
@ -1,164 +0,0 @@
[deleted: backend/test.py — a 164-line experimental variant of main.py containing the same imports and app setup with a hard-coded CORS origin (http://127.0.0.1:5500), the Flask-Login user loader, the MQTT on_message_handler that decodes PERIODIC/EVENTS/REQUEST payloads, writes MqttLog rows, and emits dashboard_update, the 'connect' and 'join_station_room' WebSocket handlers (including the initial-state emit from the latest PERIODIC log), the GET /api/stations route with the 90-second online timeout, and stubbed-out login/add_user/add_station routes marked "code omitted for brevity"]
@ -0,0 +1,20 @@
# This file defines how to run your application container.
# It specifies the build context, port mappings, and environment variables.

version: '3.8'

services:
  web:
    # 'build: .' tells Docker Compose to look for the Dockerfile in the current directory.
    build: .
    # The name for the running container.
    container_name: swapstation_app
    # Restart the container automatically if it stops.
    restart: always
    # Map port 80 on the host (the AWS server) to port 80 in the container (where Nginx is listening).
    ports:
      - "80:80"
    # This section tells Docker Compose to read environment variables from a file named '.env'.
    # This is how you will pass your secrets and configuration to the application.
    env_file:
      - .env
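With the .env file present next to docker-compose.yml, the resolved configuration can be inspected and the stack brought up using the standard Compose commands (the same ones DEPLOYMENT.md documents):

docker-compose config
docker-compose up -d --build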
@ -1,7 +1,12 @@
 document.addEventListener('DOMContentLoaded', () => {
     // --- CONFIGURATION ---
-    const SOCKET_URL = "http://10.10.2.47:5000";
-    const API_BASE = "http://10.10.2.47:5000/api";
+    // const SOCKET_URL = "http://10.10.1.169:5000";
+    // const API_BASE = "http://10.10.1.169:5000/api";
+
+    // --- CONFIGURATION ---
+    const SOCKET_URL = window.location.origin; // Connects to the server that served the page
+    const API_BASE = "/api"; // Relative path for API calls
 
     // --- DOM ELEMENT REFERENCES ---
     const stationNameEl = document.getElementById('station-name');
@ -255,8 +260,9 @@ document.addEventListener('DOMContentLoaded', () => {
 
     // --- ADD THIS NEW LISTENER for lightweight status updates ---
     socket.on('status_update', (data) => {
-        // data will look like: { status: 'Online' }
         console.log("Live status update received:", data.status);
 
+        // 1. Update the connection status chip (this part is the same)
         if (connChip) {
             if (data.status === 'Online') {
                 connChip.innerHTML = `<span class="h-2 w-2 rounded-full bg-emerald-400 animate-pulseDot"></span> Online`;
@ -266,6 +272,18 @@ document.addEventListener('DOMContentLoaded', () => {
                 connChip.className = 'cham_chip cham_chip-rose';
             }
         }
+
+        // 2. NEW: Fetch and update the uptime KPI value
+        if (selectedStation && stationUptimeEl) {
+            fetch(`${API_BASE}/uptime/${selectedStation.id}`)
+                .then(res => res.json())
+                .then(uptimeData => {
+                    if (uptimeData.uptime !== "Error") {
+                        stationUptimeEl.textContent = `${uptimeData.uptime} %`;
+                    }
+                })
+                .catch(err => console.error("Failed to fetch live uptime:", err));
+        }
     });
 };
@ -1,4 +1,9 @@
 // frontend/js/auth.js
+
+// --- CONFIGURATION ---
+const SOCKET_URL = window.location.origin; // Connects to the server that served the page
+const API_BASE = "/api"; // Relative path for API calls
+
 document.addEventListener('DOMContentLoaded', () => {
     const loginForm = document.getElementById('login-form');
     const errorMessageDiv = document.getElementById('error-message');
@ -11,7 +16,7 @@ document.addEventListener('DOMContentLoaded', () => {
         const password = document.getElementById('password').value;
 
         try {
-            const response = await fetch('http://10.10.2.47:5000/api/login', {
+            const response = await fetch('/api/login', {
                 method: 'POST',
                 headers: { 'Content-Type': 'application/json' },
                 body: JSON.stringify({ username, password }),
@ -1,8 +1,12 @@
 // frontend/js/common-header.js
 document.addEventListener('DOMContentLoaded', () => {
     // --- CONFIGURATION ---
-    const SOCKET_URL = "http://10.10.2.47:5000";
-    const API_BASE = "http://10.10.2.47:5000/api";
+    // const SOCKET_URL = "http://10.10.1.169:5000";
+    // const API_BASE = "http://10.10.1.169:5000/api";
+
+    // --- CONFIGURATION ---
+    const SOCKET_URL = window.location.origin; // Connects to the server that served the page
+    const API_BASE = "/api"; // Relative path for API calls
 
     // --- STATE & SELECTED STATION ---
     let selectedStation = null;
@ -1,7 +1,11 @@
 document.addEventListener('DOMContentLoaded', () => {
     // --- CONFIGURATION ---
-    const SOCKET_URL = "http://10.10.2.47:5000";
-    const API_BASE = "http://10.10.2.47:5000/api"; // Added for API calls
+    // const SOCKET_URL = "http://10.10.1.169:5000";
+    // const API_BASE = "http://10.10.1.169:5000/api"; // Added for API calls
+
+    // --- CONFIGURATION ---
+    const SOCKET_URL = window.location.origin; // Connects to the server that served the page
+    const API_BASE = "/api"; // Relative path for API calls
 
     // --- DOM ELEMENT REFERENCES ---
     const grid = document.getElementById('chambersGrid');
@ -329,35 +333,6 @@ document.addEventListener('DOMContentLoaded', () => {
         logToInstance("Station is offline. Clearing stale data.", "error");
     };
 
     [-29 lines: the old, fully commented-out checkStationStatus() polling helper (fetching /api/stations and updating the header chip) was deleted]
     // --- MAIN LOGIC (Your original code is unchanged) ---
     const initializeDashboard = () => {
         try {
@ -1,7 +1,11 @@
 document.addEventListener('DOMContentLoaded', () => {
     // --- CONFIGURATION ---
-    const SOCKET_URL = "http://10.10.2.47:5000";
-    const API_BASE = "http://10.10.2.47:5000/api";
+    // const SOCKET_URL = "http://10.10.1.169:5000";
+    // const API_BASE = "http://10.10.1.169:5000/api";
+
+    // --- CONFIGURATION ---
+    const SOCKET_URL = window.location.origin; // Connects to the server that served the page
+    const API_BASE = "/api"; // Relative path for API calls
 
     // --- DOM ELEMENT REFERENCES ---
     const stationNameEl = document.getElementById('station-name');
@ -45,29 +49,6 @@ document.addEventListener('DOMContentLoaded', () => {
         textarea.scrollTop = textarea.scrollHeight;
     };
 
     [-23 lines: the old, fully commented-out fetchRecentLogs() helper (fetching /api/logs/recent/<id> and splitting EVENTS/REQUEST entries into the two log areas) was deleted]
 
     const fetchRecentLogs = async () => {
         // Get values from all filters
@ -1,34 +1,3 @@
     [-31 lines: an old, fully commented-out draft of page-header.js (reading the selected station from localStorage, redirecting to station_selection.html when none is set, and filling the station name/location/ID/product ID header fields) was deleted]
 // frontend/js/page-header.js
 
 // This function fetches the common header and injects it into the page
@ -1,168 +1,162 @@
// In frontend/js/station_selection.js

// const API_BASE = 'http://10.10.1.169:5000/api';

// --- CONFIGURATION ---
const SOCKET_URL = window.location.origin; // Connects to the server that served the page
const API_BASE = "/api"; // Relative path for API calls

const grid = document.getElementById('stations-grid');
const addStationCardTmpl = document.getElementById('add-station-card-template');
const stationCardTmpl = document.getElementById('station-card-template');

const searchEl = document.getElementById('search');
const emptyState = document.getElementById('empty-state');
const errorState = document.getElementById('error-state');

// THEMED STATUS DROPDOWN LOGIC
const statusBtn = document.getElementById('statusBtn');
const statusMenu = document.getElementById('statusMenu');
const statusLabel = document.getElementById('statusLabel');
let statusValue = 'all';

// Modals
const userModal = document.getElementById('userModal');
const stationModal = document.getElementById('stationModal');

const openModal = (el) => { el.classList.remove('hidden'); el.classList.add('block'); };
const closeModal = (el) => { el.classList.add('hidden'); el.classList.remove('block'); };

// Header buttons
document.getElementById('addUserBtn').onclick = () => openModal(userModal);
document.getElementById('cancelUserBtn').onclick = () => closeModal(userModal);
document.getElementById('logoutBtn').onclick = () => { localStorage.clear(); window.location.href = './index.html'; };
document.getElementById('cancelStationBtn').onclick = () => closeModal(stationModal);

// Forms
document.getElementById('userForm').addEventListener('submit', async (e)=>{
    e.preventDefault();
    const payload = { username: newUsername.value.trim(), password: newPassword.value, is_admin: isAdmin.checked };
    try {
        const res = await fetch(`${API_BASE}/users`, {
            method:'POST',
            headers:{'Content-Type':'application/json'},
            body: JSON.stringify(payload),
            credentials: 'include'
        });
        if(!res.ok) throw new Error('Failed to add user');
        closeModal(userModal); alert('User added');
    } catch(err){ alert(err.message); }
});

document.getElementById('stationForm').addEventListener('submit', async (e)=>{
    e.preventDefault();
    const payload = {
        station_id: stationId.value.trim(),
        product_id: stationProductId.value.trim(),
        name: stationName.value.trim(),
        location: stationLocation.value.trim(),
        mqtt_broker: mqttBroker.value.trim(),
        mqtt_port: Number(mqttPort.value),
        mqtt_user: mqttUsername.value || null,
        mqtt_password: mqttPassword.value || null,
    };

    try {
        const res = await fetch(`${API_BASE}/stations`, {
            method:'POST',
            headers:{'Content-Type':'application/json'},
            body: JSON.stringify(payload),
            credentials: 'include'
        });
        if(!res.ok) throw new Error('Failed to add station');
        closeModal(stationModal); await loadStations();
    } catch(err){ alert(err.message); }
});

function statusStyles(status){
    const online = { dot:'bg-emerald-400 animate-pulseDot', badge:'bg-emerald-500/15 text-emerald-300 border border-emerald-400/20', text:'Online' };
    const offline = { dot:'bg-rose-500', badge:'bg-rose-500/15 text-rose-300 border border-rose-400/20', text:'Offline' };
    return String(status).toLowerCase()==='online'?online:offline;
}

function setStatus(val, label) {
    statusValue = val;
    statusLabel.textContent = label;
    statusMenu.classList.add('hidden');
    applyFilters(); // reuse your existing function
}

let allStations = [];

function render(stations){
    grid.innerHTML = '';

    if(!stations || stations.length===0){
        emptyState.classList.remove('hidden');
    } else {
        emptyState.classList.add('hidden');
        for(const s of stations){
            const node = stationCardTmpl.content.cloneNode(true);
            const card = node.querySelector('div');
            card.dataset.stationId = s.id || s.station_id;
            card.querySelector('.station-name').textContent = s.name ?? `Station ${s.id || s.station_id}`;

            // const productIdVal = s.product_id || '—';
            // const productIdEl = card.querySelector('.product-id');
            // if (productIdEl) {
            //     // Use .innerHTML and add a styled <span> for the title
            //     productIdEl.innerHTML = `<span class="font-semibold text-white-500">Product ID: </span>${productIdVal}`;
            // }
            const productIdVal = s.product_id || '—';
            const productIdEl = card.querySelector('.product-id');
            if (productIdEl) {
                productIdEl.textContent = productIdVal;
            }
            card.querySelector('.station-location').textContent = s.location ?? '—';
            const idVal = s.id || s.station_id || '—';
            const idEl = card.querySelector('.station-id');
            idEl.textContent = idVal; idEl.setAttribute('title', idVal);

            const styles = statusStyles(s.status);
            const dot = card.querySelector('.status-dot');
            dot.className = `status-dot h-2.5 w-2.5 rounded-full ${styles.dot}`;
            const badge = card.querySelector('.status-badge');
            badge.className = `status-badge rounded-full px-2 py-0.5 text-[10px] font-semibold uppercase tracking-wide ${styles.badge}`;
            badge.textContent = styles.text;

            // Metrics
            const starts = s.total_swaps_started ?? s.metrics?.total_starts ?? 0;
            const success = s.total_swaps_success ?? s.metrics?.total_completed ?? 0;
            const aborted = s.total_swaps_aborted ?? s.metrics?.total_aborted ?? 0;
            card.querySelector('.metric-starts').textContent = starts;
            card.querySelector('.metric-success').textContent = success;
            card.querySelector('.metric-aborted').textContent = aborted;

            // Open
            card.querySelector('.open-btn').addEventListener('click', () => {
                localStorage.setItem('selected_station', JSON.stringify(s));
                const id = encodeURIComponent(s.id || s.station_id);
                window.location.href = `./dashboard.html?stationId=${id}`;
            });

            // --- ADD THIS NEW BLOCK FOR THE REMOVE BUTTON ---
            card.querySelector('.remove-btn').addEventListener('click', async () => {
                const stationId = s.id || s.station_id;
                const stationName = s.name;

                // 1. Confirm with the user
                if (!confirm(`Are you sure you want to permanently remove "${stationName}"?`)) {
                    return;
                }

                try {
                    // 2. Call the DELETE API endpoint
                    const response = await fetch(`${API_BASE}/stations/${stationId}`, {
                        method: 'DELETE',
                    });

                    if (response.ok) {
                        alert(`Station "${stationName}" removed successfully.`);
                        // 3. Refresh the entire list from the server
                        loadStations();
                    } else {
                        const error = await response.json();
                        alert(`Failed to remove station: ${error.message}`);
                    }
                } catch (error) {
                    console.error('Error removing station:', error);
                    alert('An error occurred while trying to remove the station.');
                }
            });

            grid.appendChild(node);
        }
    }

    // Finally, append the Add Station card LAST
    const addNode = addStationCardTmpl.content.cloneNode(true);
    const addCard = addNode.querySelector('div');
    addCard.addEventListener('click', () => openModal(stationModal));
    grid.appendChild(addNode);

    if (window.lucide) {
        lucide.createIcons();
    }
}

statusBtn.addEventListener('click', () => {
    statusMenu.classList.toggle('hidden');
});
statusMenu.querySelectorAll('button').forEach(b=>{
    b.addEventListener('click', () => setStatus(b.dataset.value, b.textContent.trim()));
});

function applyFilters(){
    const q = (searchEl.value||'').trim().toLowerCase();
    const status = statusValue; // 'all' | 'online' | 'offline'
    const filtered = allStations.filter(s=>{
        const matchesQ = !q || [s.name, s.id, s.station_id, s.location].filter(Boolean).some(v=>String(v).toLowerCase().includes(q));
        const matchesStatus = status==='all' || String(s.status).toLowerCase()===status;
        return matchesQ && matchesStatus;
    });
    render(filtered);
}

searchEl.addEventListener('input', ()=> setTimeout(applyFilters,150));

async function loadStations() {
    try {
        // Step 1: Fetch both the station list and the daily stats at the same time.
        const [stationsResponse, statsResponse] = await Promise.all([
            fetch(`${API_BASE}/stations`),
            fetch(`${API_BASE}/stations/daily-stats`)
        ]);

        if (!stationsResponse.ok) {
            throw new Error('Failed to fetch station list');
        }

        const stationsList = await stationsResponse.json();
        // It's okay if stats fail; we can just show 0.
        const statsDict = statsResponse.ok ? await statsResponse.json() : {};

        // Step 2: Merge the stats into the station list.
        const mergedStations = stationsList.map(station => {
            const stats = statsDict[station.id] || { total_starts: 0, completed: 0, aborted: 0 };
            return {
                ...station, // Keep all original station properties
                // Add the stats properties that the render() function expects
                metrics: {
                    total_starts: stats.total_starts,
                    total_completed: stats.completed,
                    total_aborted: stats.aborted
                }
            };
        });

        allStations = Array.isArray(mergedStations) ? mergedStations : [];

        // Hide error message if successful
        errorState.classList.add('hidden');

        // Step 3: Render the page with the combined data.
        applyFilters();

    } catch (err) {
        console.error("Error loading stations:", err);
        allStations = []; // Clear any old data
        applyFilters(); // Render the empty state
        errorState.textContent = 'Failed to load stations. Please ensure the API is running and reachable.';
        errorState.classList.remove('hidden');
    }
}

document.addEventListener('click', (e)=>{
    if (!document.getElementById('statusFilterWrap').contains(e.target)) statusMenu.classList.add('hidden');
});

async function refreshData() {
    try {
        console.log("Refreshing data..."); // For debugging
        const [stationsResponse, statsResponse] = await Promise.all([
            fetch(`${API_BASE}/stations`),
            fetch(`${API_BASE}/stations/daily-stats`)
        ]);

        if (!stationsResponse.ok) return; // Fail silently on refresh

        const stationsList = await stationsResponse.json();
        const statsDict = statsResponse.ok ? await statsResponse.json() : {};

        const mergedStations = stationsList.map(station => {
            const stats = statsDict[station.id] || { total_starts: 0, completed: 0, aborted: 0 };
            return {
                ...station,
                metrics: {
                    total_starts: stats.total_starts,
                    total_completed: stats.completed,
                    total_aborted: stats.aborted
                }
            };
        });

        // If a station has been added or removed, do a full reload to redraw the grid
        if (mergedStations.length !== allStations.length) {
            loadStations();
            return;
        }

        // Update the master list
        allStations = mergedStations;

        // Update each card in the DOM without rebuilding it
        allStations.forEach(s => {
            const card = grid.querySelector(`[data-station-id="${s.id}"]`);
            if (card) {
                const styles = statusStyles(s.status);
                card.querySelector('.status-dot').className = `status-dot h-2.5 w-2.5 rounded-full ${styles.dot}`;
                const badge = card.querySelector('.status-badge');
                badge.className = `status-badge rounded-full px-2 py-0.5 text-[10px] font-semibold uppercase tracking-wide ${styles.badge}`;
                badge.textContent = styles.text;

                card.querySelector('.metric-starts').textContent = s.metrics.total_starts;
                card.querySelector('.metric-success').textContent = s.metrics.total_completed;
                card.querySelector('.metric-aborted').textContent = s.metrics.total_aborted;
            }
        });
    } catch (err) {
        console.error("Auto-refresh failed:", err);
    }
}

document.addEventListener('DOMContentLoaded', loadStations);
setInterval(refreshData, 15000); // 15000 milliseconds = 15 seconds
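For reference, a minimal sketch of the object shape that loadStations() and refreshData() hand to render() after the daily-stats merge; the id, name, and metric values below are made up for illustration and are not part of the commit, but the metrics keys mirror the merge code above.

// Hypothetical example (assumed values): one entry of allStations after merging
// /api/stations with /api/stations/daily-stats.
const exampleMergedStation = {
    id: "ST-001",                // assumed id format, illustration only
    name: "Demo Station",
    location: "Depot A",
    status: "Online",
    product_id: "P-100",
    metrics: { total_starts: 12, total_completed: 10, total_aborted: 2 }
};
// render([exampleMergedStation]) would show 12 / 10 / 2 in the card metrics.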
@ -45,7 +45,7 @@
<div class="mx-auto max-w-7xl px-4 py-4 grid grid-cols-3 items-center gap-3">

    <div>
        <h1 class="text-xl md:text-2xl font-extrabold tracking-tight">Select a Station</h1>
    </div>

    <div class="flex justify-center">
@ -106,7 +106,7 @@

<!-- Station Card Template (with metrics) -->
<template id="station-card-template">
    <div class="group rounded-2xl border border-white/10 bg-white/5 p-4 transition animate-fadeUp hover:-translate-y-1.5 hover:border-emerald-400/60 hover:shadow-[0_0_0_1px_rgba(16,185,129,0.25),0_20px_40px_rgba(0,0,0,0.45)]" data-station-id="">
        <div class="flex items-start justify-between">
            <div class="min-w-0">
                <div class="flex items-center gap-2">
@ -224,221 +224,7 @@
        </div>
    </div>

    <script src="./js/station_selection.js"></script>

</body>
</html>
/nginx.conf
@ -0,0 +1,42 @@
events {
    worker_connections 1024;
}

http {
    upstream gunicorn_server {
        server 127.0.0.1:5000;
    }

    server {
        listen 80;
        server_name localhost;

        # Location for static frontend files
        location / {
            root /usr/share/nginx/html;
            try_files $uri /index.html;
        }

        # Proxy settings for API traffic
        location /api {
            proxy_pass http://gunicorn_server;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        }

        # Proxy settings for WebSocket traffic
        location /socket.io {
            proxy_pass http://gunicorn_server/socket.io;
            proxy_http_version 1.1;
            proxy_buffering off;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "Upgrade";
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
        }
    }
}
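Because the frontend now uses the relative "/api" prefix, every browser request goes to the same Nginx server that served the page and is forwarded to Gunicorn by the locations above. A small sketch of how the client-side calls line up with this proxy; the io() usage is only an assumption about how a Socket.IO client would be configured and is not part of this commit.

// Page is served from e.g. http://YOUR_SERVER_PUBLIC_IP_OR_DOMAIN
const API_BASE = "/api";                   // matched by "location /api"
const SOCKET_URL = window.location.origin; // matched by "location /socket.io"

// REST call: resolves to <origin>/api/stations and is proxied to Gunicorn.
fetch(`${API_BASE}/stations`).then(res => console.log("API reachable:", res.ok));

// WebSocket upgrade (assumed client usage; requires the socket.io client script):
// const socket = io(SOCKET_URL, { path: "/socket.io" });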
@ -0,0 +1,21 @@
# Flask
# Flask-SocketIO
# Flask-SQLAlchemy
# Flask-Cors
# Flask-Login
# psycopg2-binary
# paho-mqtt
# protobuf
# python-dotenv

Flask==2.2.2
Flask-SocketIO==5.3.3
Flask-Cors==3.0.10
Flask-Login==0.6.2
python-dotenv==0.21.0
SQLAlchemy==1.4.41
psycopg2-binary==2.9.3
gunicorn==20.1.0
eventlet==0.33.1
protobuf==3.20.1
Werkzeug